How to use the pyflink.java_gateway.get_gateway function in pyflink

To help you get started, we’ve selected a few pyflink.java_gateway.get_gateway examples, based on popular ways the function is used in public projects.

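Before diving into the examples, note that get_gateway returns the process-wide Py4J gateway that backs every PyFlink call; its jvm attribute gives direct access to Java classes. A minimal sketch:

from pyflink.java_gateway import get_gateway

gateway = get_gateway()
# Reach into the JVM and call a static method on java.lang.String.
j_value = gateway.jvm.java.lang.String.valueOf(42)
print(j_value)  # prints '42'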

Example from apache/flink (flink-python/pyflink/testing/source_sink_utils.py):

class TestRetractSink(TestTableSink):

    def __init__(self, field_names, field_types):
        TestTableSink._ensure_initialized()

        gateway = get_gateway()
        super(TestRetractSink, self).__init__(
            gateway.jvm.TestRetractSink(), field_names, field_types)
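
In a test, the sink would typically be registered with a table environment and written to; a hypothetical sketch (t_env is assumed to be a TableEnvironment created elsewhere, and the schema is a placeholder):

from pyflink.table import DataTypes

# Register the retract sink under a name, then emit a query result into it.
t_env.register_table_sink(
    "results",
    TestRetractSink(["a", "b"], [DataTypes.BIGINT(), DataTypes.STRING()]))
t_env.scan("source").insert_into("results")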

Example from apache/flink (flink-python/pyflink/table/descriptors.py):

def failure_handler_custom(self, failure_handler_class_name):
    """
    Configures a failure handling strategy in case a request to Elasticsearch fails.

    This strategy allows for custom failure handling using an ``ActionRequestFailureHandler``.

    :param failure_handler_class_name: The fully qualified class name of the failure handler.
    :return: This object.
    """
    gateway = get_gateway()
    self._j_elasticsearch = self._j_elasticsearch.failureHandlerCustom(
        gateway.jvm.Thread.currentThread().getContextClassLoader()
               .loadClass(failure_handler_class_name))
    return self
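
A hypothetical usage sketch; the handler class name is a placeholder and must refer to a Java class implementing ActionRequestFailureHandler that is available on the classpath:

from pyflink.table.descriptors import Elasticsearch

elasticsearch = (
    Elasticsearch()
    .version("6")
    .host("localhost", 9200, "http")
    # Placeholder class name; supply your own handler implementation.
    .failure_handler_custom("com.example.MyFailureHandler"))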

Example from apache/flink (flink-python/pyflink/table/types.py):

def _from_java_type(j_data_type):
    gateway = get_gateway()

    if _is_instance_of(j_data_type, gateway.jvm.TypeInformation):
        # input is TypeInformation
        LegacyTypeInfoDataTypeConverter = \
            gateway.jvm.org.apache.flink.table.types.utils.LegacyTypeInfoDataTypeConverter
        java_data_type = LegacyTypeInfoDataTypeConverter.toDataType(j_data_type)
    else:
        # input is DataType
        java_data_type = j_data_type

    # Atomic Type with parameters.
    if _is_instance_of(java_data_type, gateway.jvm.AtomicDataType):
        logical_type = java_data_type.getLogicalType()
        if _is_instance_of(logical_type, gateway.jvm.CharType):
            data_type = DataTypes.CHAR(logical_type.getLength(), logical_type.isNullable())
        elif _is_instance_of(logical_type, gateway.jvm.VarCharType):
            data_type = DataTypes.VARCHAR(logical_type.getLength(), logical_type.isNullable())
        # ... handling of the remaining logical types is elided in this excerpt

Example from axbaretto/flink (flink-python/pyflink/table/types.py):

from py4j.java_gateway import JavaClass, JavaObject, get_java_class


def _is_instance_of(java_data_type, java_class):
    gateway = get_gateway()
    if isinstance(java_class, str):
        param = java_class
    elif isinstance(java_class, JavaClass):
        param = get_java_class(java_class)
    elif isinstance(java_class, JavaObject):
        if not _is_instance_of(java_class, gateway.jvm.Class):
            param = java_class.getClass()
        else:
            param = java_class
    else:
        raise TypeError(
            "java_class must be a string, a JavaClass, or a JavaObject")

    return gateway.jvm.org.apache.flink.api.python.shaded.py4j.reflection.TypeUtil.isInstanceOf(
        param, java_data_type)
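
Because of the branching above, the helper accepts either a fully qualified class-name string or a JavaClass reference; a sketch:

gateway = get_gateway()
j_value = gateway.jvm.java.lang.Integer.valueOf(1)

# Both calls should report that the Java Integer is a java.lang.Number.
_is_instance_of(j_value, "java.lang.Number")            # True
_is_instance_of(j_value, gateway.jvm.java.lang.Number)  # True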

Example from apache/flink (flink-python/pyflink/table/types.py):

def _to_java_type(data_type):
    """
    Converts Python type to Java type.
    """

    global _python_java_types_mapping
    global _python_java_types_mapping_lock

    gateway = get_gateway()
    Types = gateway.jvm.org.apache.flink.table.api.Types

    if _python_java_types_mapping is None:
        with _python_java_types_mapping_lock:
            _python_java_types_mapping = {
                BooleanType: Types.BOOLEAN(),
                TinyIntType: Types.BYTE(),
                SmallIntType: Types.SHORT(),
                IntType: Types.INT(),
                BigIntType: Types.LONG(),
                FloatType: Types.FLOAT(),
                DoubleType: Types.DOUBLE(),
                DecimalType: Types.DECIMAL(),
                DateType: Types.SQL_DATE(),
                TimeType: Types.SQL_TIME(),
                TimestampType: Types.SQL_TIMESTAMP(),
                # ... remaining mappings elided in this excerpt
            }
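
Though private, the helper's behaviour is easy to demonstrate; an illustrative sketch:

from pyflink.table.types import IntType

# An IntType instance maps to the Java Types.INT() TypeInformation.
j_int_type = _to_java_type(IntType())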

Example from alibaba/flink-ai-extended (flink-ml-tensorflow/python/flink_ml_tensorflow/tensorflow_on_flink_tfconf.py):

from pyflink.java_gateway import get_gateway

class TFCONSTANS(object):
    """Python mirror of the Java TFConstants class, exposed through the Py4J gateway."""

    J_CONSTANTS = get_gateway().jvm.com.alibaba.flink.ml.tensorflow.util.TFConstants

    TF_PORT = str(J_CONSTANTS.TF_PORT)
    TF_INFERENCE_EXPORT_PATH = str(J_CONSTANTS.TF_INFERENCE_EXPORT_PATH)
    TF_INFERENCE_INPUT_TENSOR_NAMES = str(J_CONSTANTS.TF_INFERENCE_INPUT_TENSOR_NAMES)
    TF_INFERENCE_OUTPUT_TENSOR_NAMES = str(J_CONSTANTS.TF_INFERENCE_OUTPUT_TENSOR_NAMES)
    TF_INFERENCE_OUTPUT_ROW_FIELDS = str(J_CONSTANTS.TF_INFERENCE_OUTPUT_ROW_FIELDS)
    TF_INFERENCE_BATCH_SIZE = str(J_CONSTANTS.TF_INFERENCE_BATCH_SIZE)
    TF_IS_CHIEF_ALONE = str(J_CONSTANTS.TF_IS_CHIEF_ALONE)
    TF_IS_CHIEF_ROLE = str(J_CONSTANTS.TF_IS_CHIEF_ROLE)
    TENSORBOART_PORT = str(J_CONSTANTS.TENSORBOART_PORT)
    INPUT_TF_EXAMPLE_CONFIG = str(J_CONSTANTS.INPUT_TF_EXAMPLE_CONFIG)
    OUTPUT_TF_EXAMPLE_CONFIG = str(J_CONSTANTS.OUTPUT_TF_EXAMPLE_CONFIG)
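
These mirrored constants are typically used as keys in the property dictionary handed to a TensorFlow-on-Flink job; a hypothetical sketch (the values are placeholders):

tf_properties = {
    TFCONSTANS.TF_IS_CHIEF_ALONE: "true",
    TFCONSTANS.TENSORBOART_PORT: "6006",
}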

Example from apache/flink (flink-python/pyflink/table/sinks.py):

def __init__(self, field_names, field_types, path, field_delimiter=',', num_files=-1,
             write_mode=None):
    # type: (list[str], list[DataType], str, str, int, int) -> None
    gateway = get_gateway()
    if write_mode == WriteMode.NO_OVERWRITE:
        j_write_mode = gateway.jvm.org.apache.flink.core.fs.FileSystem.WriteMode.NO_OVERWRITE
    elif write_mode == WriteMode.OVERWRITE:
        j_write_mode = gateway.jvm.org.apache.flink.core.fs.FileSystem.WriteMode.OVERWRITE
    elif write_mode is None:
        j_write_mode = None
    else:
        raise Exception('Unsupported write_mode: %s' % write_mode)
    j_csv_table_sink = gateway.jvm.CsvTableSink(
        path, field_delimiter, num_files, j_write_mode)
    j_field_names = utils.to_jarray(gateway.jvm.String, field_names)
    j_field_types = utils.to_jarray(gateway.jvm.TypeInformation,
                                    [_to_java_type(field_type) for field_type in field_types])
    j_csv_table_sink = j_csv_table_sink.configure(j_field_names, j_field_types)
    super(CsvTableSink, self).__init__(j_csv_table_sink)
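
A usage sketch (the output path and schema are placeholders; WriteMode is assumed to be importable from the same module):

from pyflink.table import DataTypes
from pyflink.table.sinks import CsvTableSink, WriteMode

sink = CsvTableSink(
    ["name", "cnt"],
    [DataTypes.STRING(), DataTypes.BIGINT()],
    "/tmp/output.csv",  # placeholder path
    field_delimiter="|",
    num_files=1,
    write_mode=WriteMode.OVERWRITE)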

Example from axbaretto/flink (flink-python/pyflink/dataset/execution_environment.py):

def get_execution_environment():
    """
    Creates an execution environment that represents the context in which the program is
    currently executed. If the program is invoked standalone, this method returns a local
    execution environment. If the program is invoked from within the command line client to be
    submitted to a cluster, this method returns the execution environment of this cluster.

    :return: The :class:`ExecutionEnvironment` of the context in which the program is executed.
    """
    gateway = get_gateway()
    j_execution_environment = gateway.jvm.org.apache.flink.api.java.ExecutionEnvironment\
        .getExecutionEnvironment()
    return ExecutionEnvironment(j_execution_environment)
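
Typical usage as the program's entry point (a sketch):

from pyflink.dataset import ExecutionEnvironment

env = ExecutionEnvironment.get_execution_environment()
env.set_parallelism(1)  # configure the environment before building the job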

Example from apache/flink (flink-python/pyflink/datastream/state_backend.py):

"""
        Gets the current predefined options for RocksDB.
        The default options (if nothing was set via :func:`setPredefinedOptions`)
        are :data:`PredefinedOptions.DEFAULT`.

        If user-configured options within ``RocksDBConfigurableOptions`` is set (through
        flink-conf.yaml) or a user-defined options factory is set (via :func:`setOptions`),
        then the options from the factory are applied on top of the predefined and customized
        options.

        .. seealso:: :func:`set_predefined_options`

        :return: Current predefined options.
        """
        j_predefined_options = self._j_rocks_db_state_backend.getPredefinedOptions()
        gateway = get_gateway()
        JPredefinedOptions = gateway.jvm.org.apache.flink.contrib.streaming.state.PredefinedOptions
        if j_predefined_options == JPredefinedOptions.DEFAULT:
            return PredefinedOptions.DEFAULT
        elif j_predefined_options == JPredefinedOptions.FLASH_SSD_OPTIMIZED:
            return PredefinedOptions.FLASH_SSD_OPTIMIZED
        elif j_predefined_options == JPredefinedOptions.SPINNING_DISK_OPTIMIZED:
            return PredefinedOptions.SPINNING_DISK_OPTIMIZED
        elif j_predefined_options == JPredefinedOptions.SPINNING_DISK_OPTIMIZED_HIGH_MEM:
            return PredefinedOptions.SPINNING_DISK_OPTIMIZED_HIGH_MEM
        else:
            raise Exception("Unsupported java options: %s" % j_predefined_options)

Example from apache/flink (flink-python/pyflink/table/descriptors.py):

def __init__(self):
    gateway = get_gateway()
    self._j_kafka = gateway.jvm.Kafka()
    super(Kafka, self).__init__(self._j_kafka)
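
The freshly constructed descriptor is then chained with connector settings, for example (topic and broker address are placeholders):

from pyflink.table.descriptors import Kafka

kafka = (
    Kafka()
    .version("universal")
    .topic("my-topic")                                # placeholder topic
    .property("bootstrap.servers", "localhost:9092")  # placeholder broker
    .start_from_latest())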