Commit 61dd22b699bd8e62bdbe452b34f7d852b0811522

Authored by 芯火源
1 parent f5e5f3a3

refactor: restore the configuration file (恢复配置文件)

@@ -28,9 +28,9 @@ server:
 # Zookeeper connection parameters. Used for service discovery.
 zk:
   # Enable/disable zookeeper discovery service.
-  enabled: "${ZOOKEEPER_ENABLED:true}"
+  enabled: "${ZOOKEEPER_ENABLED:false}"
   # Zookeeper connect string
-  url: "${ZOOKEEPER_URL:123.60.37.18:22181,124.70.179.189:22181,124.71.191.130:22181}"
+  url: "${ZOOKEEPER_URL:localhost:2181}"
   # Zookeeper retry interval in milliseconds
   retry_interval_ms: "${ZOOKEEPER_RETRY_INTERVAL_MS:3000}"
   # Zookeeper connection timeout in milliseconds
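Each "${ENV_VAR:default}" value is a Spring-style placeholder: the named environment variable wins when set, and the default after the colon applies otherwise. So the revert only swaps hard-coded cluster defaults for neutral fallbacks; a deployment can still re-enable ZooKeeper discovery without editing the file. A minimal docker-compose-style sketch (the service name tb-node and the ZooKeeper hostnames are illustrative):

# docker-compose.override.yml (sketch)
services:
  tb-node:
    environment:
      # Turn service discovery back on and point it at a real ensemble.
      ZOOKEEPER_ENABLED: "true"
      ZOOKEEPER_URL: "zk-1:2181,zk-2:2181,zk-3:2181"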
@@ -61,14 +61,14 @@ redis:
     usePoolConfig: "${REDIS_CLIENT_USE_POOL_CONFIG:false}"
   cluster:
     # Comma-separated list of "host:port" pairs to bootstrap from.
-    nodes: "${REDIS_NODES:123.60.37.18:6379,124.70.179.189:6379,124.71.191.130:6379}"
+    nodes: "${REDIS_NODES:}"
     # Maximum number of redirects to follow when executing commands across the cluster.
     max-redirects: "${REDIS_MAX_REDIRECTS:12}"
     useDefaultPoolConfig: "${REDIS_USE_DEFAULT_POOL_CONFIG:true}"
   # db index
-  db: "${REDIS_DB:15}"
+  db: "${REDIS_DB:0}"
   # db password
-  password: "${REDIS_PASSWORD:redis@6379}"
+  password: "${REDIS_PASSWORD:}"
   # pool config
   pool_config:
     maxTotal: "${REDIS_POOL_CONFIG_MAX_TOTAL:128}"
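With the hard-coded cluster nodes, database index, and password removed from the defaults, those Redis settings are expected to arrive via the environment at deploy time. A hedged sketch in the same docker-compose style (hostnames, password, and the tb-node service name are placeholders):

# docker-compose.override.yml (sketch)
services:
  tb-node:
    environment:
      # An empty REDIS_NODES default means "cluster not configured" until set here.
      REDIS_NODES: "redis-1:6379,redis-2:6379,redis-3:6379"
      REDIS_DB: "0"
      REDIS_PASSWORD: "change-me"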
@@ -190,7 +190,7 @@ transport:
 queue:
   type: "${TB_QUEUE_TYPE:kafka}" # kafka (Apache Kafka) or aws-sqs (AWS SQS) or pubsub (PubSub) or service-bus (Azure Service Bus) or rabbitmq (RabbitMQ)
   kafka:
-    bootstrap.servers: "${TB_KAFKA_SERVERS:139.198.106.153:9092}"
+    bootstrap.servers: "${TB_KAFKA_SERVERS:localhost:9092}"
     acks: "${TB_KAFKA_ACKS:all}"
     retries: "${TB_KAFKA_RETRIES:1}"
     batch.size: "${TB_KAFKA_BATCH_SIZE:16384}"
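The Kafka bootstrap address follows the same pattern: localhost:9092 is only the fallback, and an external broker is supplied through TB_KAFKA_SERVERS. Sketch (broker hostnames are placeholders):

# docker-compose.override.yml (sketch)
services:
  tb-node:
    environment:
      TB_QUEUE_TYPE: "kafka"
      TB_KAFKA_SERVERS: "kafka-1:9092,kafka-2:9092"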
@@ -201,10 +201,10 @@ queue:
     replication_factor: "${TB_QUEUE_KAFKA_REPLICATION_FACTOR:1}"
     use_confluent_cloud: "${TB_QUEUE_KAFKA_USE_CONFLUENT_CLOUD:false}"
     confluent:
-      ssl.algorithm: "${TB_QUEUE_KAFKA_CONFLUENT_SSL_ALGORITHM:http}"
-      sasl.mechanism: "${TB_QUEUE_KAFKA_CONFLUENT_SASL_MECHANISM:SCRAM-SHA-512}"
-      sasl.config: "${TB_QUEUE_KAFKA_CONFLUENT_SASL_JAAS_CONFIG:org.apache.kafka.common.security.scram.ScramLoginModule required username=\"thingskit\" password=\"thingskit\";}"
-      security.protocol: "${TB_QUEUE_KAFKA_CONFLUENT_SECURITY_PROTOCOL:SASL_PLAINTEXT}"
+      ssl.algorithm: "${TB_QUEUE_KAFKA_CONFLUENT_SSL_ALGORITHM:https}"
+      sasl.mechanism: "${TB_QUEUE_KAFKA_CONFLUENT_SASL_MECHANISM:PLAIN}"
+      sasl.config: "${TB_QUEUE_KAFKA_CONFLUENT_SASL_JAAS_CONFIG:org.apache.kafka.common.security.plain.PlainLoginModule required username=\"CLUSTER_API_KEY\" password=\"CLUSTER_API_SECRET\";}"
+      security.protocol: "${TB_QUEUE_KAFKA_CONFLUENT_SECURITY_PROTOCOL:SASL_SSL}"
     other: # In this section you can specify custom parameters for Kafka consumer/producer and expose the env variables to configure outside
       - key: "request.timeout.ms" # refer to https://docs.confluent.io/platform/current/installation/configuration/producer-configs.html#producerconfigs_request.timeout.ms
         value: "${TB_QUEUE_KAFKA_REQUEST_TIMEOUT_MS:30000}" # (30 seconds)
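The restored confluent block likewise keeps credentials out of the file: CLUSTER_API_KEY and CLUSTER_API_SECRET are documentation placeholders, and real values would be injected through TB_QUEUE_KAFKA_CONFLUENT_SASL_JAAS_CONFIG when use_confluent_cloud is enabled. Sketch (the bootstrap endpoint, key, and secret shown are placeholders):

# docker-compose.override.yml (sketch)
services:
  tb-node:
    environment:
      TB_QUEUE_KAFKA_USE_CONFLUENT_CLOUD: "true"
      TB_KAFKA_SERVERS: "pkc-example.us-east-1.aws.confluent.cloud:9092"
      TB_QUEUE_KAFKA_CONFLUENT_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="MY_API_KEY" password="MY_API_SECRET";'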