# -----------------------------------------------------------------------------
# Airflow [core] configuration (reassembled).
# NOTE(review): this block was scraped from a web page and the original line
# order was garbled; keys and comments below are re-paired to match the stock
# airflow.cfg layout. All key names and values are unchanged except where a
# NOTE(review) says otherwise.
# -----------------------------------------------------------------------------

# The folder where your airflow pipelines live, most likely a
# subfolder in a code repository.
dags_folder = /usr/local/airflow/dags

# Hostname by providing a path to a callable, which will resolve the hostname.
# For example, default value "socket:getfqdn" means that result from getfqdn() of "socket"
# package will be used as hostname.
# No argument should be required in the function specified.
# If using IP address as hostname is preferred, use value
# ``airflow.utils.net:get_host_ip_address``
hostname_callable = socket:getfqdn

# Default timezone in case supplied date times are naive
# can be utc (default), system, or any IANA timezone string (e.g.
# Europe/Amsterdam)
default_timezone = utc

# The executor class that airflow should use. Choices include
# SequentialExecutor, LocalExecutor, CeleryExecutor, DaskExecutor, KubernetesExecutor
executor = SequentialExecutor

# The SqlAlchemy connection string to the metadata database.
# SqlAlchemy supports many different database engines; more information on
# their website
# sql_alchemy_conn = sqlite:////tmp/airflow.db

# The encoding for the databases
sql_engine_encoding = utf-8

# NOTE(review): the scraped text also contained the comment fragment
# "If SqlAlchemy should pool database connections." — it belongs to the
# sql_alchemy_pool_enabled key, whose key/value did not survive extraction.

# The folder where airflow should store its log files
# This path must be absolute
base_log_folder = /usr/local/airflow/logs

# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search.
# Set this to True if you want to enable remote logging.
remote_logging = False

# Users must supply an Airflow connection id that provides access to the storage
# location.
remote_log_conn_id =
remote_base_log_folder =
encrypt_s3_logs = False

# Logging level
logging_level = INFO

# Logging level for Flask-appbuilder UI
fab_logging_level = WARN

# Logging class
# Specify the class that will specify the logging configuration
# This class has to be on the python classpath
# Example: logging_config_class = my.fault_local_settings.LOGGING_CONFIG
logging_config_class =

# Flag to enable/disable Colored logs in Console
# Colour the logs when the controlling terminal is a TTY.
colored_console_log = True

# Log format for when Colored logs is enabled
# NOTE(review): the original value did not survive extraction (only a stray
# "." remained); left blank so Airflow falls back to its built-in default.
# Restore the intended format here if one was customised.
colored_log_format =

# NOTE(review): a stray "log" token preceded this key in the scraped text,
# most likely the tail of a lost log_processor_filename_template value.
dag_processor_manager_log_location = /usr/local/airflow/logs/dag_processor_manager/dag_processor_manager.log

# Name of handler to read task instance logs.
task_log_reader = task
# --- non-configuration residue from the web page this file was scraped from ---
# 0 Comments
# Leave a Reply. |
# AuthorWrite something about yourself. No need to be fancy, just an overview. ArchivesCategories |