logstash-converted-pipeline:
  source:
    http:
      max_connection_count: 500
      request_timeout: 10000
      port: 3000
  processor:
    - grok:
        break_on_match: false
        match:
          /data/log1:
            - "%{COMBINEDAPACHELOG}"
            - "%{NUMBER} %{GREEDYDATA:/nested/field/data1}"
          /data/log2:
            - "%{COMBINEDAPACHELOG}"
            - "%{NUMBER:num:int} %{GREEDYDATA:/nested/field/data2}"
        keys_to_overwrite:
          - "/nested/field/data1"
          - "/nested/field/data2"
    - date:
        from_time_received: false
        destination_timezone: "UTC"
        destination: "/data/@timestamp"
        match:
          - key: "/data/timestamp"
            patterns:
              - "yyyy-MM-dd"
    - key_value:
        source: "/data/message"
        destination: "/data/test"
    - add_entries:
        entries:
          - key: "/messages/message1"
            value: 3
          - key: "/messages/message2"
            value: 4.2
          - key: "/messages/message3"
            value: "test3"
          - key: "/messages/message4"
            value: true
    - rename_keys:
        entries:
          - from_key: "/messages/messagea"
            to_key: "/newMessage"
          - from_key: "/messages/messageb"
            to_key: "/newMessage2"
    - copy_values:
        entries:
          - from_key: "/messages/messagec"
            to_key: "/messaged"
    - delete_entries:
        with_keys:
          - "/messages/test"
          - "/test2"
          - "/messages/test3"
          - "/test4"
    - uppercase_string:
        with_keys:
          - "/message/uppercaseField"
  sink:
    - opensearch:
        hosts:
          - "https://localhost:19000"
        username: "myuser"
        password: "mypassword"
        index: "simple-pipeline"
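Once Data Prepper is running with this pipeline, events can be sent to the `http` source on port 3000 as a JSON array. The sketch below is a minimal client, assuming a locally running pipeline and the source's default ingest path (`/log/ingest`; the config above does not override `path`); the Apache-style log line is a hypothetical sample chosen so the `%{COMBINEDAPACHELOG}` pattern for the `/data/log1` key has something to match.

```python
# Minimal sketch: POST a batch of events to the pipeline's http source.
# Assumes Data Prepper is running locally with the pipeline above and that
# the http source uses its default ingest path (/log/ingest).
import json
import urllib.request

events = [
    {
        # The grok key "/data/log1" addresses the nested field data.log1,
        # so the payload nests the log line accordingly.
        "data": {
            "log1": '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] '
                    '"GET /apache_pb.gif HTTP/1.0" 200 2326 "-" "curl/7.68.0"'
        }
    }
]

req = urllib.request.Request(
    url="http://localhost:3000/log/ingest",  # port 3000 from the source config
    data=json.dumps(events).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)

with urllib.request.urlopen(req) as resp:
    print(resp.status)  # 200 indicates the batch was accepted
```

Because the grok processor sets `break_on_match: false`, every pattern listed under a matched key is attempted rather than stopping at the first hit, and events that pass through the processor chain are written to the `simple-pipeline` index on the configured OpenSearch host.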