diff --git a/broker-jaas.conf b/broker-jaas.conf
new file mode 100644
index 00000000..dd800453
--- /dev/null
+++ b/broker-jaas.conf
@@ -0,0 +1,13 @@
+KafkaServer {
+    org.apache.kafka.common.security.plain.PlainLoginModule required
+    username="admin"
+    password="admin-secret"
+    user_admin="admin-secret";
+  };
+
+Client {
+    org.apache.zookeeper.server.auth.DigestLoginModule required
+    username="admin"
+    password="admin-secret"
+    user_admin="admin-secret";
+  };
diff --git a/data-collector/README.md b/data-collector/README.md
index 90c1906c..33ea8bb5 100644
--- a/data-collector/README.md
+++ b/data-collector/README.md
@@ -17,6 +17,8 @@ By default, this component will validate that hosts in the `KAFKA_BOOTSTRAP_TLS`
 
 If your Kafka cluster uses TLS certificates issued by a private Certificate Authority, you will need to provide the CA Certificate in PEM format so that certificate validation can be performed when connecting to the Kafka cluster. You should do this by including the CA certificate in PEM format in the `/tls` directory of the container, probably through a volume mount.
 
+At present Kiln supports authentication between brokers and producers/consumers using the SASL/PLAIN mechanism. Authentication is optional and is enabled by setting the `ENABLE_KAFKA_AUTH` environment variable. If this variable is set, you also need to supply the username and password for authentication using the `KAFKA_SASL_AUTH_USERNAME` and `KAFKA_SASL_AUTH_PASSWORD` environment variables respectively.
+
 ## Request & Response Documentation
 
 You shouldn't generally need to make manual requests to the data-collector, instead prefer to use the ToolReport struct from kiln_lib and serialise that to JSON before sending to the data-collector. If you do need to make a manual request to the data-collector, see [docs/request-response.md](docs/request-response.md).
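For reference, a minimal sketch of a Docker Compose service entry that exercises these settings, mirroring the `docker-compose-auth.yml` introduced later in this change (the `admin`/`admin-secret` credentials are the sample values from `broker-jaas.conf`, shown for illustration only, not recommended defaults):

```yaml
  data-collector:
    image: kiln/data-collector:git-latest
    depends_on:
      - kafka
    environment:
      - KAFKA_BOOTSTRAP_TLS=kafka:9092
      - ENABLE_KAFKA_AUTH=true
      - KAFKA_SASL_AUTH_USERNAME=admin
      - KAFKA_SASL_AUTH_PASSWORD=admin-secret
    volumes:
      - ./tls:/tls
```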
diff --git a/data-forwarder/Cargo.lock b/data-forwarder/Cargo.lock
index 8c34d01d..a320c1aa 100644
--- a/data-forwarder/Cargo.lock
+++ b/data-forwarder/Cargo.lock
@@ -310,9 +310,9 @@ checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce"
 
 [[package]]
 name = "git2"
-version = "0.13.19"
+version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17929de7239dea9f68aa14f94b2ab4974e7b24c1314275ffcc12a7758172fa18"
+checksum = "26e07ef27260a78f7e8d218ebac2c72f2c4db50493741b190b6e8eade1da7c68"
 dependencies = [
  "bitflags",
  "libc",
@@ -491,11 +491,12 @@ dependencies = [
 [[package]]
 name = "kiln_lib"
 version = "0.5.0"
-source = "git+https://github.com/simplybusiness/Kiln?branch=main#a91159e65c69eb9f4a9b59c4f4351d36c1fbedef"
+source = "git+https://github.com/simplybusiness/Kiln?branch=main#884e96059622c72e99254ac737bee25aee964adf"
 dependencies = [
  "anyhow",
  "chrono",
  "hex",
+ "mime",
  "openssl-probe",
  "regex",
  "ring",
@@ -521,9 +522,9 @@ checksum = "ba4aede83fc3617411dc6993bc8c70919750c1c257c6ca6a502aed6e0e2394ae"
 
 [[package]]
 name = "libgit2-sys"
-version = "0.12.20+1.1.0"
+version = "0.11.0+0.99.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e2f09917e00b9ad194ae72072bb5ada2cca16d8171a43e91ddba2afbb02664b"
+checksum = "4d5d1459353d397a029fb18862166338de938e6be976606bd056cf8f1a912ecf"
 dependencies = [
  "cc",
  "libc",
diff --git a/data-forwarder/Cargo.toml b/data-forwarder/Cargo.toml
index a3d65865..5c31bedc 100644
--- a/data-forwarder/Cargo.toml
+++ b/data-forwarder/Cargo.toml
@@ -8,7 +8,7 @@ kiln_lib = { git = "https://github.com/simplybusiness/Kiln", features = [ "json"
 clap = "2"
 chrono = "0.4"
 reqwest = { version = "0.11", features = [ "blocking", "json",] }
-git2 = "0.13"
+git2 = "0.12"
 uuid = { version = "0.8", features = [ "v4",] }
 openssl-probe = "0.1.4"
 toml = "0.5"
diff --git a/docker-compose-auth.yml b/docker-compose-auth.yml
new file mode 100644
index 00000000..f91edef1
--- /dev/null
+++ b/docker-compose-auth.yml
@@ -0,0 +1,90 @@
+version: '2'
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+    environment:
+      SERVER_JVMFLAGS: -Djava.security.auth.login.config=/opt/zookeeper_jaas.conf
+        -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
+        -Dzookeeper.authProvider.2=org.apache.zookeeper.server.auth.DigestAuthenticationProvider
+        -DjaasLoginRenew=3600000
+        -DrequireClientAuthScheme=sasl
+    volumes:
+      - ./zookeeper_jaas.conf:/opt/zookeeper_jaas.conf
+
+  kafka:
+    image: wurstmeister/kafka
+    depends_on:
+      - zookeeper
+    ports:
+      - "9092:9092"
+    hostname: kafka
+    environment:
+      KAFKA_ADVERTISED_HOSTNAME: kafka
+      KAFKA_LISTENERS: "SASL_SSL://kafka:9092"
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: ""
+      KAFKA_SSL_TLS_VERSION: TLSv1.2
+      KAFKA_SSL_PROTOCOL: TLSv1.2
+      KAFKA_SSL_ENABLED_PROTOCOLS: TLSv1.2
+      KAFKA_SSL_SECURE_RANDOM_IMPLEMENTATION: NativePRNG
+      KAFKA_SSL_CIPHER_SUITES: TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
+      KAFKA_SSL_KEYSTORE_LOCATION: /tls/kafka.keystore.jks
+      KAFKA_SSL_KEYSTORE_PASSWORD: password
+      KAFKA_SSL_KEY_PASSWORD: password
+      KAFKA_SSL_TRUSTSTORE_LOCATION: /tls/kafka.truststore.jks
+      KAFKA_SSL_TRUSTSTORE_PASSWORD: password
+      KAFKA_CREATE_TOPICS: "ToolReports:6:1,DependencyEvents:6:1"
+      KAFKA_MESSAGE_MAX_BYTES: 10000000
+      KAFKA_REPLICA_FETCH_MAX_BYTES: 10000000
+      KAFKA_SECURITY_PROTOCOL: SASL_SSL
+      KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
+      KAFKA_SECURITY_INTER_BROKER_PROTOCOL: SASL_SSL
+      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAIN
+      KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/broker-jaas.conf"
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+      - ./tls:/tls
+      - ./broker-jaas.conf:/etc/kafka/broker-jaas.conf
+  data-collector:
+    depends_on:
+      - kafka
+    image: kiln/data-collector:git-latest
+    ports:
+      - "8081:8080"
+    environment:
+      - KAFKA_BOOTSTRAP_TLS=kafka:9092
+      - ENABLE_KAFKA_AUTH=true
+      - KAFKA_SASL_AUTH_USERNAME=admin
+      - KAFKA_SASL_AUTH_PASSWORD=admin-secret
+      - DISABLE_KAFKA_DOMAIN_VALIDATION=true
+    volumes:
+      - ./tls:/tls
+  report-parser:
+    depends_on:
+      - kafka
+    image: kiln/report-parser:git-latest
+    environment:
+      - KAFKA_BOOTSTRAP_TLS=kafka:9092
+      - ENABLE_KAFKA_AUTH=true
+      - KAFKA_SASL_AUTH_USERNAME=admin
+      - KAFKA_SASL_AUTH_PASSWORD=admin-secret
+      - DISABLE_KAFKA_DOMAIN_VALIDATION=true
+    volumes:
+      - ./tls:/tls
+  slack-connector:
+    depends_on:
+      - kafka
+    image: kiln/slack-connector:git-latest
+    environment:
+      - KAFKA_BOOTSTRAP_TLS=kafka:9092
+      - RUST_LOG=info
+      - DISABLE_KAFKA_DOMAIN_VALIDATION=true
+      - ENABLE_KAFKA_AUTH=true
+      - KAFKA_SASL_AUTH_USERNAME=admin
+      - KAFKA_SASL_AUTH_PASSWORD=admin-secret
+      - OAUTH2_TOKEN
+      - SLACK_CHANNEL_ID
+    volumes:
+      - ./tls:/tls
diff --git a/kiln_lib/src/kafka.rs b/kiln_lib/src/kafka.rs
index 23365dc1..616b017a 100644
--- a/kiln_lib/src/kafka.rs
+++ b/kiln_lib/src/kafka.rs
@@ -6,9 +6,6 @@ use rdkafka::error::KafkaError;
 use rdkafka::producer::future_producer::FutureProducer;
 use std::fmt::Display;
 
-#[derive(Debug, Clone)]
-pub struct KafkaBootstrapTlsConfig(Vec<String>);
-
 #[derive(Debug)]
 pub enum ValidationFailureReason {
     Missing,
@@ -16,6 +13,19 @@ pub enum ValidationFailureReason {
     CouldNotBeParsed,
 }
 
+#[derive(Debug, Clone)]
+pub struct KafkaAuthConfig {
+    auth_required: bool,
+    username: Option<String>,
+    password: Option<String>,
+}
+
+#[derive(Debug, Clone)]
+pub struct KafkaBootstrapConfig {
+    tls_config: Vec<String>,
+    auth_config: KafkaAuthConfig,
+}
+
 impl Display for ValidationFailureReason {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
@@ -44,7 +54,7 @@ pub enum KafkaConfigError {
     TlsTrustStore,
 }
 
-pub fn get_bootstrap_config<I>(vars: &mut I) -> Result<KafkaBootstrapTlsConfig, KafkaConfigError>
+pub fn get_bootstrap_config<I>(vars: &mut I) -> Result<KafkaBootstrapConfig, KafkaConfigError>
 where
     I: Iterator<Item = (String, String)>,
 {
@@ -52,26 +62,26 @@ where
     let local_vars = vars.collect::<Vec<(String, String)>>();
     let disable_kafka_domain_validation = match local_vars
         .iter()
         .find(|var| var.0 == "DISABLE_KAFKA_DOMAIN_VALIDATION")
-    {
-        None => Ok(false),
-        Some(var) => {
-            if var.1.is_empty() {
-                return Err(KafkaConfigError::OptionalValueValidationFailure {
-                    var: "DISABLE_KAFKA_DOMAIN_VALIDATION".into(),
-                    reason: ValidationFailureReason::PresentButEmpty,
-                });
-            } else {
-                match var.1.as_ref() {
-                    "true" => Ok(true),
-                    "false" => Ok(false),
-                    _ => Err(KafkaConfigError::OptionalValueValidationFailure {
+        {
+            None => Ok(false),
+            Some(var) => {
+                if var.1.is_empty() {
+                    return Err(KafkaConfigError::OptionalValueValidationFailure {
                         var: "DISABLE_KAFKA_DOMAIN_VALIDATION".into(),
-                        reason: ValidationFailureReason::CouldNotBeParsed,
-                    }),
+                        reason: ValidationFailureReason::PresentButEmpty,
+                    });
+                } else {
+                    match var.1.as_ref() {
+                        "true" => Ok(true),
+                        "false" => Ok(false),
+                        _ => Err(KafkaConfigError::OptionalValueValidationFailure {
+                            var: "DISABLE_KAFKA_DOMAIN_VALIDATION".into(),
+                            reason: ValidationFailureReason::CouldNotBeParsed,
+                        }),
+                    }
                 }
             }
-        }
-    }?;
+        }?;
     let kafka_bootstrap_tls = match local_vars.iter().find(|var| var.0 == "KAFKA_BOOTSTRAP_TLS") {
         None => Err(KafkaConfigError::RequiredValueValidationFailure {
@@ -110,11 +120,51 @@ where
         }
     }?;
 
-    Ok(KafkaBootstrapTlsConfig(kafka_bootstrap_tls))
+    let check_config_var = |x: String|
+        match local_vars.iter().find(|var| var.0 == x) {
+            None =>
+                Err(KafkaConfigError::OptionalValueValidationFailure {
+                    var: x,
+                    reason: ValidationFailureReason::Missing,
+                }),
+            Some(v) =>
+                if v.1.is_empty(){
+                    Err(KafkaConfigError::OptionalValueValidationFailure {
+                        var: x,
+                        reason: ValidationFailureReason::PresentButEmpty,
+                    })
+                } else {
+                    Ok(Some(v.1.to_owned()))
+                }
+        };
+
+
+    let kafka_auth_config = match local_vars.iter().find(|var| var.0 == "ENABLE_KAFKA_AUTH") {
+        None =>
+            KafkaAuthConfig {
+                auth_required: false,
+                username: None,
+                password: None,
+            },
+        Some(_) => {
+            let username = check_config_var("KAFKA_SASL_AUTH_USERNAME".into())?;
+            let password = check_config_var("KAFKA_SASL_AUTH_PASSWORD".into())?;
+            KafkaAuthConfig {
+                auth_required: true,
+                username: username,
+                password: password,
+            }
+        },
+    };
+
+    Ok(KafkaBootstrapConfig {
+        tls_config: kafka_bootstrap_tls,
+        auth_config: kafka_auth_config,
+    })
 }
 
 pub fn build_kafka_producer(
-    config: KafkaBootstrapTlsConfig,
+    config: KafkaBootstrapConfig,
 ) -> Result<FutureProducer, KafkaConfigError> {
     let cert_probe_result = openssl_probe::probe();
     let cert_location = match cert_probe_result {
@@ -122,20 +172,34 @@ pub fn build_kafka_producer(
         ProbeResult { cert_dir, .. } if cert_dir.is_some() => Ok(cert_dir),
         _ => Err(KafkaConfigError::TlsTrustStore),
     }?;
-
-    ClientConfig::new()
-        .set("metadata.broker.list", &config.0.join(","))
-        .set("compression.type", "gzip")
-        .set("security.protocol", "SSL")
-        .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256")
-        .set("ssl.ca.location", cert_location.unwrap().to_string_lossy())
-        .set("message.max.bytes", "10000000")
-        .create()
-        .map_err(|err| err.into())
+    if config.auth_config.auth_required {
+        ClientConfig::new()
+            .set("metadata.broker.list", &config.tls_config.join(","))
+            .set("compression.type", "gzip")
+            .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256")
+            .set("ssl.ca.location", cert_location.unwrap().to_string_lossy())
+            .set("message.max.bytes", "10000000")
+            .set("security.protocol","SASL_SSL")
+            .set("sasl.mechanism", "PLAIN")
+            .set("sasl.username", config.auth_config.username.unwrap())
+            .set("sasl.password", config.auth_config.password.unwrap())
+            .create()
+            .map_err(|err| err.into())
+    } else {
+        ClientConfig::new()
+            .set("metadata.broker.list", &config.tls_config.join(","))
+            .set("compression.type", "gzip")
+            .set("security.protocol", "SSL")
+            .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256")
+            .set("ssl.ca.location", cert_location.unwrap().to_string_lossy())
+            .set("message.max.bytes", "10000000")
+            .create()
+            .map_err(|err| err.into())
+    }
 }
 
 pub fn build_kafka_consumer(
-    config: KafkaBootstrapTlsConfig,
+    config: KafkaBootstrapConfig,
     consumer_group_name: String,
 ) -> Result<StreamConsumer, KafkaConfigError> {
     let cert_probe_result = openssl_probe::probe();
@@ -145,16 +209,32 @@ pub fn build_kafka_consumer(
         _ => Err(KafkaConfigError::TlsTrustStore),
     }?;
 
-    ClientConfig::new()
-        .set("metadata.broker.list", &config.0.join(","))
-        .set("group.id", &consumer_group_name)
-        .set("compression.type", "gzip")
.set("security.protocol", "SSL") - .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256") - .set("ssl.ca.location", cert_location.unwrap().to_string_lossy()) - .set("fetch.message.max.bytes", "10000000") - .create() - .map_err(|err| err.into()) + if config.auth_config.auth_required { + ClientConfig::new() + .set("metadata.broker.list", &config.tls_config.join(",")) + .set("compression.type", "gzip") + .set("group.id", &consumer_group_name) + .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256") + .set("ssl.ca.location", cert_location.unwrap().to_string_lossy()) + .set("fetch.message.max.bytes", "10000000") + .set("sasl.mechanism", "PLAIN") + .set("security.protocol","SASL_SSL") + .set("sasl.username", config.auth_config.username.unwrap()) + .set("sasl.password", config.auth_config.password.unwrap()) + .create() + .map_err(|err| err.into()) + } else { + ClientConfig::new() + .set("metadata.broker.list", &config.tls_config.join(",")) + .set("group.id", &consumer_group_name) + .set("compression.type", "gzip") + .set("security.protocol", "SSL") + .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256") + .set("ssl.ca.location", cert_location.unwrap().to_string_lossy()) + .set("fetch.message.max.bytes", "10000000") + .create() + .map_err(|err| err.into()) + } } #[cfg(test)] @@ -164,16 +244,28 @@ mod tests { #[allow(unused_must_use)] #[tokio::test] async fn creating_kafka_producer_does_not_return_a_client_config_error() { - let config = - KafkaBootstrapTlsConfig(vec!["host1:1234".to_string(), "host2:1234".to_string()]); + let config = KafkaBootstrapConfig { + tls_config: vec!["host1:1234".to_string(), "host2:1234".to_string()], + auth_config: KafkaAuthConfig { + auth_required: false, + username: None, + password: None, + }, + }; build_kafka_producer(config).unwrap(); } #[allow(unused_must_use)] #[tokio::test] async fn creating_kafka_consumer_does_not_return_a_client_config_error() { - let config = - KafkaBootstrapTlsConfig(vec!["host1:1234".to_string(), "host2:1234".to_string()]); + let config = KafkaBootstrapConfig { + tls_config: vec!["host1:1234".to_string(), "host2:1234".to_string()], + auth_config: KafkaAuthConfig { + auth_required: false, + username: None, + password: None, + }, + }; build_kafka_consumer(config, "TestConsumerGroup".to_string()).unwrap(); } @@ -190,7 +282,7 @@ mod tests { let actual = get_bootstrap_config(&mut fake_vars).expect("expected Ok(_) value"); - assert_eq!(actual.0, expected); + assert_eq!(actual.tls_config, expected); } #[test] @@ -242,12 +334,12 @@ mod tests { "true".to_owned(), ), ] - .into_iter(); + .into_iter(); let expected = vec![hostname.clone()]; let actual = get_bootstrap_config(&mut fake_vars).expect("expected Ok(_) value"); - assert_eq!(actual.0, expected) + assert_eq!(actual.tls_config, expected) } #[test] @@ -282,7 +374,7 @@ mod tests { "DISABLE_KAFKA_DOMAIN_VALIDATION".to_owned(), "blah".to_owned(), )] - .into_iter(); + .into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( @@ -290,4 +382,105 @@ mod tests { "Optional environment variable DISABLE_KAFKA_DOMAIN_VALIDATION failed validation because value could not be parsed" ) } + + #[test] + fn get_bootstrap_config_returns_error_auth_enabled_but_username_unset() { + let hostname = 
"my.kafka.host.example.com:1234".to_owned(); + let mut fake_vars = vec![ + ("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone()), + ("ENABLE_KAFKA_AUTH".to_owned(), "true".to_owned()), + ] + .into_iter(); + + let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); + + assert_eq!( + actual.to_string(), + "Optional environment variable KAFKA_SASL_AUTH_USERNAME failed validation because value is missing" + ) + } + + + #[test] + fn get_bootstrap_config_returns_error_auth_enabled_but_username_empty() { + let hostname = "my.kafka.host.example.com:1234".to_owned(); + let mut fake_vars = vec![ + ("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone()), + ("ENABLE_KAFKA_AUTH".to_owned(), "true".to_owned()), + ("KAFKA_SASL_AUTH_USERNAME".to_owned(), "".to_owned()), + ] + .into_iter(); + + let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); + + assert_eq!( + actual.to_string(), + "Optional environment variable KAFKA_SASL_AUTH_USERNAME failed validation because value is present but empty" + ) + } + + + #[test] + fn get_bootstrap_config_returns_error_auth_enabled_but_password_unset() { + let hostname = "my.kafka.host.example.com:1234".to_owned(); + let mut fake_vars = vec![ + ("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone()), + ("ENABLE_KAFKA_AUTH".to_owned(), "true".to_owned()), + ("KAFKA_SASL_AUTH_USERNAME".to_owned(), "admin".to_owned()), + ] + .into_iter(); + + let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); + + assert_eq!( + actual.to_string(), + "Optional environment variable KAFKA_SASL_AUTH_PASSWORD failed validation because value is missing" + ) + } + + #[test] + fn get_bootstrap_config_returns_error_auth_enabled_but_password_empty() { + let hostname = "my.kafka.host.example.com:1234".to_owned(); + let mut fake_vars = vec![ + ("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone()), + ("ENABLE_KAFKA_AUTH".to_owned(), "true".to_owned()), + ("KAFKA_SASL_AUTH_USERNAME".to_owned(), "admin".to_owned()), + ("KAFKA_SASL_AUTH_PASSWORD".to_owned(), "".to_owned()), + ] + .into_iter(); + + let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); + + assert_eq!( + actual.to_string(), + "Optional environment variable KAFKA_SASL_AUTH_PASSWORD failed validation because value is present but empty" + ) + } + + + + #[test] + fn get_bootstrap_config_returns_correct_auth_config() { + let hostname = "my.kafka.host.example.com:1234".to_owned(); + let mut fake_vars = vec![ + ("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone()), + ("ENABLE_KAFKA_AUTH".to_owned(), "true".to_owned()), + ("KAFKA_SASL_AUTH_USERNAME".to_owned(), "admin".to_owned()), + ( + "KAFKA_SASL_AUTH_PASSWORD".to_owned(), + "adminpassword".to_owned(), + ), + ] + .into_iter(); + + let actual = get_bootstrap_config(&mut fake_vars) + .expect("No errors should be returned when values are set correctly"); + + assert_eq!(actual.auth_config.auth_required, true); + assert_eq!(actual.auth_config.username.unwrap(), "admin".to_string()); + assert_eq!( + actual.auth_config.password.unwrap(), + "adminpassword".to_string() + ); + } } diff --git a/report-parser/README.md b/report-parser/README.md index 074eb335..6631a73a 100644 --- a/report-parser/README.md +++ b/report-parser/README.md @@ -17,4 +17,6 @@ By default, this component will validate that hosts in the `KAFKA_BOOTSTRAP_TLS` If your Kafka cluster uses TLS certificates issued by a private Certificate Authority, you will need to provide the CA Certificate in PEM format so that 
 
 If your Kafka cluster uses TLS certificates issued by a private Certificate Authority, you will need to provide the CA Certificate in PEM format so that certificate validation can be performed when connecting to the Kafka cluster. You should do this by including the CA certificate in PEM format in the `/tls` directory of the container, probably through a volume mount.
 
+At present Kiln supports authentication between brokers and producers/consumers using the SASL/PLAIN mechanism. Authentication is optional and is enabled by setting the `ENABLE_KAFKA_AUTH` environment variable. If this variable is set, you also need to supply the username and password for authentication using the `KAFKA_SASL_AUTH_USERNAME` and `KAFKA_SASL_AUTH_PASSWORD` environment variables respectively.
+
 If you want to provide an alternative URL for downloading NIST NVD data, this can be configured by starting the report-parser with the `NVD_BASE_URL` environment variable set to the URL of your NVD mirror.
diff --git a/slack-connector/README.md b/slack-connector/README.md
index 5b8dcbe2..904927e2 100644
--- a/slack-connector/README.md
+++ b/slack-connector/README.md
@@ -22,3 +22,6 @@ If your Kafka cluster uses TLS certificates issued by a private Certificate Auth
 You will also need the Channel ID for the Slack Channel you want to route notifications to. This can be found by opening Slack in a web browser and loading the channel you want Kiln to send notifications to. The last components of the URL path will contain the channel ID and will begin with a 'C'. This is supplied to the connector using the `SLACK_CHANNEL_ID` environment variable.
 
 Lastly, you will need to supply the OAuth2 access token you created earlier as the `OAUTH2_TOKEN` environment variable. This value is a secret and should be handled accordingly to avoid accidental disclosure in shell history, logs etc. Unfortunately the topic of secrets management is out of the scope of this documentation.
+
+At present Kiln supports authentication between brokers and producers/consumers using the SASL/PLAIN mechanism. Authentication is optional and is enabled by setting the `ENABLE_KAFKA_AUTH` environment variable. If this variable is set, you also need to supply the username and password for authentication using the `KAFKA_SASL_AUTH_USERNAME` and `KAFKA_SASL_AUTH_PASSWORD` environment variables respectively.
+
diff --git a/zookeeper_jaas.conf b/zookeeper_jaas.conf
new file mode 100644
index 00000000..2649160f
--- /dev/null
+++ b/zookeeper_jaas.conf
@@ -0,0 +1,6 @@
+Server {
+    org.apache.zookeeper.server.auth.DigestLoginModule required
+    user_admin="admin-secret";
+};
+
+
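For reference, a minimal sketch of how a service binary might consume the new configuration API from `kiln_lib`. The wiring below is illustrative only (it assumes the `kafka` module is publicly exported and that the environment variables described above are set) and is not code taken from this change:

```rust
use kiln_lib::kafka::{build_kafka_producer, get_bootstrap_config};

fn main() {
    // get_bootstrap_config reads KAFKA_BOOTSTRAP_TLS, DISABLE_KAFKA_DOMAIN_VALIDATION,
    // ENABLE_KAFKA_AUTH, KAFKA_SASL_AUTH_USERNAME and KAFKA_SASL_AUTH_PASSWORD from the
    // (String, String) pairs it is given - here, the process environment.
    let config = get_bootstrap_config(&mut std::env::vars())
        .expect("Kafka bootstrap configuration should be valid");

    // build_kafka_producer configures SASL_SSL with the PLAIN mechanism when
    // ENABLE_KAFKA_AUTH was set, and falls back to the existing SSL-only setup otherwise.
    let _producer = build_kafka_producer(config)
        .expect("Kafka producer should be created from the bootstrap configuration");
}
```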