diff --git a/samples/oci-logs-splunk-hec/README.md b/samples/oci-logs-splunk-hec/README.md
new file mode 100644
index 0000000..6e5e233
--- /dev/null
+++ b/samples/oci-logs-splunk-hec/README.md
@@ -0,0 +1,96 @@
+## Monitor Oracle Cloud Infrastructure Logs with Splunk
+
+Forward logs from the Oracle Cloud Infrastructure (OCI) Logging service to Splunk via the
+HTTP Event Collector (HEC).
+
+
+## Prerequisites
+
+Before you deploy this sample function, make sure you have completed steps A, B
+and C of the [Oracle Functions Quick Start Guide for Cloud Shell](https://www.oracle.com/webfolder/technetwork/tutorials/infographics/oci_functions_cloudshell_quickview/functions_quickview_top/functions_quickview/index.html):
+* A - Set up your tenancy
+* B - Create application
+* C - Set up your Cloud Shell dev environment
+
+
+## List Applications
+
+Assuming you have successfully completed the prerequisites, you should see your
+application in the list of applications.
+
+```
+fn ls apps
+```
+
+
+## Configure your Function
+
+To send logs to Splunk, you'll need to define two environment variables:
+* `SPLUNK_HEC_ENDPOINT` - the HTTP/HTTPS REST endpoint for the HEC service
+* `SPLUNK_HEC_TOKEN` - the token used to authenticate with HEC
+
+
+### Splunk Enterprise / Splunk Cloud
+
+If you haven't already, you'll need to set up the HTTP Event Collector on
+your Splunk instance. Instructions for configuring and using HEC are
+available at https://docs.splunk.com/Documentation/Splunk/9.0.1/Data/UsetheHTTPEventCollector.
+
+
+## Deploy the function
+
+In Cloud Shell, run the `fn deploy` command to build this function and its dependencies as a Docker image,
+push the image to the specified Docker registry, and deploy the function to Oracle Functions
+in the application created earlier:
+
+![user input icon](./images/userinput.png)
+```
+fn -v deploy --app <app-name>
+```
+e.g.,
+```
+fn -v deploy --app myapp
+```
+
+
+## Configure the logs you want to capture
+
+1. From the [OCI Console](https://cloud.oracle.com) navigation menu, select **Logging**, and then select **Log Groups**.
+
+2. Click **Create Log Group**, select your compartment, and add a name and description.
+
+3. Select **Logs** in the left menu, click **Enable Service Log**, select your compartment, choose the service and log category, and fill in the rest of the fields appropriately.
+
+
+## Create a Service Connector to read logs from Logging and send them to Functions
+
+1. From the navigation menu, select **Logging**, and then select **Service Connectors**.
+
+2. Click **Create Connector**, add a name and description, select the compartment, and choose **Logging** as the source and **Functions** as the target.
+
+3. Under **Configure source connection**, select the compartment and the log group created earlier.
+
+4. Under **Configure target connection**, select the compartment and the function. If prompted to create a policy for writing to functions, click **Create**.
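+
+Optionally, before relying on the Service Connector, you can verify that your HEC endpoint and
+token work by sending a test event directly with `curl`. The host and token below are placeholders
+(8088 is Splunk's default HEC port and `/services/collector/event` is the standard HEC event path;
+use `-k` only if your instance uses a self-signed certificate). Since the function posts directly
+to `SPLUNK_HEC_ENDPOINT`, that variable should normally be set to this same full URL:
+
+```
+curl -k "https://<splunk-host>:8088/services/collector/event" \
+  -H "Authorization: Splunk <your-hec-token>" \
+  -d '{"event": "test event from OCI", "sourcetype": "_json"}'
+```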
+
+
+## Monitoring Functions and Service Connector
+
+Make sure you configure basic observability for your function and connector using metrics, alarms and email alerts:
+* [Basic Guidance for Monitoring your Functions](../basic-observability/functions.md)
+* [Basic Guidance for Monitoring your Service Connector](../basic-observability/service-connector-hub.md)
+
+---
+## Function Environment
+
+Here are the supported Function parameters:
+
+| Environment Variable | Default | Purpose |
+| ------------- |:-------------:| :----- |
+| SPLUNK_HEC_ENDPOINT | not-configured | REST API endpoint for reaching the Splunk HEC ([see docs](https://docs.splunk.com/Documentation/Splunk/9.2.0/Data/UsetheHTTPEventCollector#Configure_HTTP_Event_Collector_on_Splunk_Cloud_Platform)) |
+| SPLUNK_HEC_TOKEN | not-configured | HEC authentication token obtained from the Splunk HEC configuration |
+| LOGGING_LEVEL | INFO | Controls function logging output. Choices: INFO, WARN, CRITICAL, ERROR, DEBUG |
+
+---
+## License
+Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved.
+Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
diff --git a/samples/oci-logs-splunk-hec/func.py b/samples/oci-logs-splunk-hec/func.py
new file mode 100644
index 0000000..b66909e
--- /dev/null
+++ b/samples/oci-logs-splunk-hec/func.py
@@ -0,0 +1,55 @@
+#
+# oci-logs-splunk-hec version 0.1.
+#
+# Copyright (c) 2024 Splunk, Inc. All rights reserved.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
+#
+
+import io
+import os
+import json
+import requests
+import logging
+from fdk import response
+
+
+"""
+This function receives the logging JSON payload and forwards it to the Splunk HTTP Event
+Collector (HEC) for ingestion. Logging format overview:
+https://docs.cloud.oracle.com/en-us/iaas/Content/Logging/Reference/top_level_logging_format.htm#top_level_logging_format
+If this function is invoked with more than one log record, all records are reformatted and
+sent to the HEC endpoint in a single batched request.
+"""
+
+def handler(ctx, data: io.BytesIO=None):
+    try:
+        logs = json.loads(data.getvalue())
+
+        # suppress verbose urllib3 logging from the log forwarder
+        urllib3_logger = logging.getLogger('urllib3')
+        urllib3_logger.setLevel(logging.CRITICAL)
+
+        # Splunk HEC endpoint URL and token used to call the REST interface.
+        # These values are defined in the Function / Application configuration.
+        hec_endpoint = os.environ['SPLUNK_HEC_ENDPOINT']
+        hec_token = os.environ['SPLUNK_HEC_TOKEN']
+        headers = {'Content-Type': 'application/json', 'Authorization': 'Splunk ' + hec_token}
+
+        # loop over each log record and reformat it for HEC.
+        concat_body = ""
+        for item in logs:
+            event = item['oracle']
+            event.update(item['data'])
+            body = {}
+            body['event'] = event
+            body['source'] = 'oci:' + item['source']
+            body['sourcetype'] = '_json'
+            concat_body = concat_body + json.dumps(body)
+
+        # Post the batched payload to HEC.
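+        # Note: HEC's batch format is simply concatenated JSON event objects
+        # (no commas or array brackets), which is exactly what concat_body holds,
+        # so every record in this invocation goes out in one POST.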
+        if len(concat_body) > 0:
+            resp = requests.post(hec_endpoint, data=concat_body, headers=headers)
+            if resp.status_code != 200:
+                logging.getLogger().error(resp.text)
+
+    except (Exception, ValueError) as ex:
+        logging.getLogger().error(str(ex))
+        return
diff --git a/samples/oci-logs-splunk-hec/func.yaml b/samples/oci-logs-splunk-hec/func.yaml
new file mode 100644
index 0000000..c7e84e4
--- /dev/null
+++ b/samples/oci-logs-splunk-hec/func.yaml
@@ -0,0 +1,6 @@
+schema_version: 20180708
+name: oci-logs-to-splunk-hec
+version: 0.0.48
+runtime: python3.9
+entrypoint: /python/bin/fdk /function/func.py handler
+memory: 256
diff --git a/samples/oci-logs-splunk-hec/requirements.txt b/samples/oci-logs-splunk-hec/requirements.txt
new file mode 100644
index 0000000..0291832
--- /dev/null
+++ b/samples/oci-logs-splunk-hec/requirements.txt
@@ -0,0 +1,3 @@
+fdk
+requests
+oci
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/LICENSE.txt b/samples/oci-monitoring-metrics-to-splunk-observability-python/LICENSE.txt
new file mode 100644
index 0000000..cf9c92b
--- /dev/null
+++ b/samples/oci-monitoring-metrics-to-splunk-observability-python/LICENSE.txt
@@ -0,0 +1,28 @@
+Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved.
+
+The Universal Permissive License (UPL), Version 1.0
+
+Subject to the condition set forth below, permission is hereby granted to any person obtaining a copy of this
+software, associated documentation and/or data (collectively the "Software"), free of charge and under any
+and all copyright rights in the Software, and any and all patent rights owned or freely licensable by each
+licensor hereunder covering either (i) the unmodified Software as contributed to or provided by such licensor,
+or (ii) the Larger Works (as defined below), to deal in both
+
+(a) the Software, and
+
+(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is included with the
+Software (each a “Larger Work” to which the Software is contributed by such licensors), without restriction,
+including without limitation the rights to copy, create derivative works of, display, perform, and
+distribute the Software and make, use, sell, offer for sale, import, export, have made, and have sold
+the Software and the Larger Work(s), and to sublicense the foregoing rights on either these or other terms.
+
+This license is subject to the following condition:
+
+The above copyright notice and either this complete permission notice or at a minimum a reference to the
+UPL must be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/README.md b/samples/oci-monitoring-metrics-to-splunk-observability-python/README.md
new file mode 100644
index 0000000..a5d00a5
--- /dev/null
+++ b/samples/oci-monitoring-metrics-to-splunk-observability-python/README.md
@@ -0,0 +1,245 @@
+# Exporting OCI Monitoring Service Metrics to Splunk Observability
+
+---
+
+## Overview
+
+Let's take a look at bringing Oracle Cloud Infrastructure (OCI)'s rich Metrics resources over to
+Splunk Observability to accomplish common goals such as DevOps monitoring, application performance
+monitoring, and so on. Splunk Observability's API expects metrics to be typed (gauge, counter, cumulative
+counter) and uses dimensions for aggregation and filtering. Splunk Observability processes and
+alerts on metrics via streaming analytics at very low latency, but it requires that data
+arrive in temporal order.
+
+### Prerequisites
+
+If you're new to Functions, get familiar by running through
+the [Quick Start guide on OCI Functions](http://docs.oracle.com/en-us/iaas/Content/Functions/Tasks/functionsquickstartguidestop.htm) before proceeding.
+
+---
+## Solution Architecture
+
+![](images/architecture.png)
+
+Here is the basic architecture and flow of data from beginning to end:
+
+* OCI services emit metric data, which is captured by the Monitoring service.
+* The Monitoring service feeds metric data events to a Service Connector.
+* The Service Connector invokes a Function which transforms the metric data
+payload to Splunk Observability format and posts the transformed payload to the
+Splunk Observability REST API.
+* Splunk Observability ingests the metrics, building its own aggregations using the provided dimensions.
+
+---
+## Deployment Order
+
+The OCI metrics Service Connector model is a fairly natural fit for the Splunk
+Observability streaming metrics platform, so the deployment involves only a few
+points of configuration. Nevertheless, there is an order dependency to deployment:
+
+1. Set up your environments. You need accounts in Oracle Cloud and Splunk Observability. Trial accounts
+have all the features you need, so you can create trials if you prefer. Also, as a reminder, if you
+haven't used Functions in Oracle Cloud before, the [Quick Start guide on OCI Functions](http://docs.oracle.com/en-us/iaas/Content/Functions/Tasks/functionsquickstartguidestop.htm) previously mentioned is where you should start.
+2. Deploy the Function (required before you configure the Service Connector).
+Be sure to double-check your values for SPLUNK_O11Y_REALM and SPLUNK_O11Y_TOKEN
+in the Function's configuration.
+3. Create the Service Connector. Starting with a single namespace (`oci_vcn`)
+works well, and once you see that work you can edit the connector and add any namespaces you wish.
+
+With that order in mind, let's drill down into the OCI services involved.
+
+---
+## Monitoring Service
+
+The [Monitoring Service](https://docs.oracle.com/en-us/iaas/Content/Monitoring/Concepts/monitoringoverview.htm)
+receives timestamp-value pairs (aka metric data points) which also carry contextual
+dimensions and metadata about the services or applications that emitted them.
+
+---
+## Service Connector Hub
+
+The stream of metric data is event-driven and must be handled on demand and at scale. The
+[Service Connector Hub](https://docs.oracle.com/en-us/iaas/Content/service-connector-hub/overview.htm) does
+exactly that.
+See the [Service Connector Hub documentation](https://docs.oracle.com/en-us/iaas/Content/service-connector-hub/overview.htm) for details.
+
+---
+## Functions Service
+
+We need to transform the raw OCI metric payloads and make the corresponding Splunk Observability API calls. The
+[OCI Functions Service](http://docs.oracle.com/en-us/iaas/Content/Functions/Concepts/functionsoverview.htm) is a
+natural fit for the task. Functions integrate nicely with Service Connector Hub as a target and can scale up
+depending on the demand. That lets us focus on writing the logic needed without having to address how to
+deploy and scale it.
+
+---
+## Mapping From OCI to Splunk Observability Formats
+
+A key requirement, of course, is the mapping from OCI to Splunk Observability format. Let's compare the OCI and
+Splunk Observability message payload formats, review what the mapping needs to accomplish, and see what the
+resulting transformed message looks like.
+
+Example OCI Metrics Payload:
+
+    {
+        "namespace": "oci_vcn",
+        "resourceGroup": null,
+        "compartmentId": "ocid1.compartment.oc1...",
+        "name": "VnicFromNetworkBytes",
+        "dimensions": {
+            "resourceId": "ocid1.vnic.oc1.phx..."
+        },
+        "metadata": {
+            "displayName": "Bytes from Network",
+            "unit": "bytes"
+        },
+        "datapoints": [
+            {
+                "timestamp": 1652196912000,
+                "value": 5780.0,
+                "count": 1
+            }
+        ]
+    }
+
+Example Splunk Observability Metrics Payload:
+
+    {
+        "gauge": [
+            {
+                "metric": "string",
+                "value": 0,
+                "dimensions": {
+                    "<dimension_name>": "any"
+                },
+                "timestamp": 1557225030000
+            }
+        ]
+    }
+
+Mapping Behavior:
+
+    {
+        "gauge": [
+            {
+                "metric": "{name}",
+                "value": {datapoint.value},
+                "dimensions": {
+                    "oci_namespace": "{namespace}",
+                    "oci_resource_group": "{resourceGroup}",      << only if not null
+                    "oci_compartment_id": "{compartmentId}",
+                    "oci_unit": "{metadata.unit}",
+                    "oci_dim_<key>": "<value>"                    << one per entry in {dimensions}
+                },
+                "timestamp": {datapoint.timestamp}
+            }
+        ]
+    }
+
+Resulting Output:
+
+    {
+        "gauge": [
+            {
+                "metric": "VnicFromNetworkBytes",
+                "value": 5780.0,
+                "dimensions": {
+                    "oci_namespace": "oci_vcn",
+                    "oci_compartment_id": "ocid1.compartment.oc1...",
+                    "oci_unit": "bytes",
+                    "oci_dim_resourceId": "ocid1.vnic.oc1.phx..."
+                },
+                "timestamp": 1652196912000
+            }
+        ]
+    }
+
+---
+## Service Connector Setup
+
+Now let's set up a simple Service Connector instance that takes Monitoring sources and passes them to our Function.
+
+Because your Function requires a VCN, you can use that VCN as the metric source to test against. Let's test
+with the `oci_vcn` Monitoring namespace because it will quickly generate a lot of useful events.
+
+Select Monitoring as the source and the Function as the target. Configure your source as the
+compartment where the VCN resides and select the Monitoring namespace (`oci_vcn`) that you want to
+pick up. Select your Application and the Function within it as the target.
+
+![Service Connector setup](images/sch-setup.png)
+
+Finally, and *importantly*, the Service Connector needs permissions to read metrics from
+Monitoring and to invoke the Function you've created.
+When you create this Service Connector, the configuration dialog offers to automatically
+create these policies for you. Do that. For reference, here are examples of the policies
+created:
+
+Read permission for metrics:
+
+```
+allow any-user to read metrics in tenancy where all {request.principal.type='serviceconnector', request.principal.compartment.id='ocid1.tenancy.oc1..abcdefghijkandsoonandsoforthandonandon', target.compartment.id in ('ocid1.tenancy.oc1..abcdefghijkandsoonandsoforthandonandon')}
+```
+
+Use and invoke permission for functions:
+
+```
+allow any-user to use fn-function in compartment id ocid1.tenancy.oc1..abcdefghijkandsoonandsoforthandonandon where all {request.principal.type='serviceconnector', request.principal.compartment.id='ocid1.tenancy.oc1..abcdefghijkandsoonandsoforthandonandon'}
+
+allow any-user to use fn-invocation in compartment id ocid1.tenancy.oc1..abcdefghijkandsoonandsoforthandonandon where all {request.principal.type='serviceconnector', request.principal.compartment.id='ocid1.tenancy.oc1..abcdefghijkandsoonandsoforthandonandon'}
+```
+
+---
+## View Metrics In Splunk Observability
+
+Once the Service Connector is configured, metrics appear in Splunk Observability's Metric Finder
+after a few minutes. The following images show the Metric Finder and Chart Builder interfaces in
+Splunk Observability with the VCN metrics displayed.
+
+![Metric Finder](images/o11y-metric-finder.png)
+
+![Chart Builder](images/o11y-chart.png)
+
+---
+## Function Environment
+
+Here are the supported Function parameters:
+
+| Environment Variable | Default | Purpose |
+| ------------- |:-------------:| :----- |
+| SPLUNK_O11Y_REALM | us0 | Realm that identifies the REST API endpoint for reaching Splunk Observability ([see docs](https://dev.splunk.com/observability/reference/api/ingest_data/latest#endpoint-send-metrics)) |
+| SPLUNK_O11Y_TOKEN | not-configured | Ingest auth token obtained from Splunk Observability |
+| LOGGING_LEVEL | INFO | Controls function logging output. Choices: INFO, WARN, CRITICAL, ERROR, DEBUG |
+| ENABLE_TRACING | False | Enables complete exception stack trace logging |
+| FORWARD_TO_SPLUNK_O11Y | True | Determines whether messages are forwarded to Splunk Observability |
+
+---
+## Conclusion
+
+You now have a low-maintenance, serverless function that sends raw metrics to Splunk Observability in
+near-real time.
+
+For more information, see the following resources:
+
+- [Splunk Observability API Reference](https://dev.splunk.com/observability/reference)
+- [Splunk Observability Send Datapoints API](https://dev.splunk.com/observability/reference/api/ingest_data/latest#endpoint-send-metrics)
+
+---
+## OCI Related Workshops
+
+LiveLabs is the place to explore Oracle's products and services using workshops designed to
+enhance your experience building and deploying applications on the Cloud and on-premises.
+Our library of workshops covers everything from how to provision the world's first autonomous
+database to setting up a web server on our world-class OCI Generation 2 infrastructure,
+machine learning, and much more. Use your existing Oracle Cloud account,
+a [Free Tier](https://www.oracle.com/cloud/free/) account, or a LiveLabs Cloud Account to build, test,
+and deploy applications on Oracle's Cloud.
+
+Visit [LiveLabs](http://bit.ly/golivelabs) now to get started. Workshops are added weekly; please visit frequently for new content.
+
+---
+## License
+Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved.
+Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/func.py b/samples/oci-monitoring-metrics-to-splunk-observability-python/func.py
new file mode 100644
index 0000000..3f4e491
--- /dev/null
+++ b/samples/oci-monitoring-metrics-to-splunk-observability-python/func.py
@@ -0,0 +1,247 @@
+#
+# oci-monitoring-metrics-to-splunk-observability version 0.1.
+#
+# Copyright (c) 2024, Splunk, Inc. All rights reserved.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
+
+import io
+import json
+import logging
+import os
+import re
+import requests
+from fdk import response
+from datetime import datetime
+
+"""
+This sample OCI Function maps OCI Monitoring Service metrics to the Splunk
+Observability REST API '/datapoint' contract found here:
+
+https://dev.splunk.com/observability/reference/api/ingest_data/latest#endpoint-send-metrics
+
+"""
+
+# Use OCI Application or Function configuration to override these environment variable defaults.
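+# Values set in the Application or Function configuration are injected into the
+# function's container as environment variables at invocation time, so os.getenv()
+# below picks them up without code changes.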
+
+api_token = os.getenv('SPLUNK_O11Y_TOKEN', 'not-configured')
+api_realm = os.getenv('SPLUNK_O11Y_REALM', 'us0')
+is_forwarding = eval(os.getenv('FORWARD_TO_SPLUNK_O11Y', "True"))
+
+# Set all registered loggers to the configured log_level
+
+logging_level = os.getenv('LOGGING_LEVEL', 'INFO')
+loggers = [logging.getLogger()] + [logging.getLogger(name) for name in logging.root.manager.loggerDict]
+[logger.setLevel(logging.getLevelName(logging_level)) for logger in loggers]
+
+# Exception stack trace logging
+
+is_tracing = eval(os.getenv('ENABLE_TRACING', "False"))
+
+# Constants
+
+TEN_MINUTES_MSEC = 10 * 60 * 1000
+ONE_HOUR_MSEC = 60 * 60 * 1000
+
+# Functions
+
+def handler(ctx, data: io.BytesIO = None):
+    """
+    OCI Function entry point
+    :param ctx: InvokeContext
+    :param data: data payload
+    :return: None
+    """
+
+    preamble = " {} / event count = {} / logging level = {} / forwarding to Splunk = {}"
+
+    try:
+        metrics_list = json.loads(data.getvalue())
+        logging.getLogger().info(preamble.format(ctx.FnName(), len(metrics_list), logging_level, is_forwarding))
+        logging.getLogger().debug(metrics_list)
+        converted_events = handle_metric_events(event_list=metrics_list)
+        send_to_splunk_o11y(events=converted_events)
+
+    except (Exception, ValueError) as ex:
+        logging.getLogger().error('error handling metrics payload: {}'.format(str(ex)))
+        if is_tracing:
+            logging.getLogger().exception(ex)
+
+
+def handle_metric_events(event_list):
+    """
+    :param event_list: the list of OCI metric log records.
+    :return: the list of Splunk Observability formatted datapoints
+    """
+
+    result_list = []
+    for event in event_list:
+        list_result = transform_metric_to_splunk_o11y_format_list(log_record=event)
+        result_list.extend(list_result)
+        logging.getLogger().debug(list_result)
+
+    return result_list
+
+
+def transform_metric_to_splunk_o11y_format_list(log_record: dict):
+    """
+    Transform metrics to Splunk Observability format. OCI does not define metric
+    types, so all OCI metrics are presented as gauge type.
+    See: https://dev.splunk.com/observability/reference/api/ingest_data/latest#endpoint-send-metrics
+    :param log_record: metric log record
+    :return: list of Splunk Observability datapoint records, ordered by timestamp
+    """
+
+    o11y_dps = []
+    datapoints = get_dictionary_value(dictionary=log_record, target_key='datapoints')
+    metric_name = get_dictionary_value(log_record, 'name')
+    metric_dims = get_metric_dimensions(log_record)
+    for point in datapoints:
+        o11y_dp = {
+            'metric': metric_name,
+            'value': point.get('value'),
+            'dimensions': metric_dims,
+            'timestamp': point.get('timestamp')
+        }
+        o11y_dps.append(o11y_dp)
+
+    ordered_dps = sorted(o11y_dps, key=lambda dp: dp['timestamp'])
+    return ordered_dps
+
+
+def get_metric_dimensions(log_record: dict):
+    """
+    Assembles dimensions from selected metric attributes.
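+    Dimension names and values are sanitized by fix_dimension_name() /
+    fix_dimension_value() below (whitespace and quotes removed, names capped
+    at 128 characters and values at 256).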
+    :param log_record: the log record to scan
+    :return: dictionary of dimensions meeting Splunk Observability semantics
+    """
+
+    result = {}
+
+    # context dimensions
+
+    result['oci_namespace'] = get_dictionary_value(dictionary=log_record, target_key="namespace")
+    result['oci_compartment_id'] = get_dictionary_value(dictionary=log_record, target_key="compartmentId")
+    unit = get_dictionary_value(dictionary=log_record, target_key="unit")
+    if unit is not None:
+        result['oci_unit'] = unit
+    rg = get_dictionary_value(dictionary=log_record, target_key="resourceGroup")
+    if rg is not None:
+        result['oci_resource_group'] = rg
+
+    dim_dict = get_dictionary_value(dictionary=log_record, target_key="dimensions")
+    for dim_name, dim_value in dim_dict.items():
+        fixed_value = fix_dimension_value(dim_value)
+        if fixed_value is not None:
+            result[fix_dimension_name('oci_dim_' + str(dim_name))] = fixed_value
+
+    return result
+
+
+def fix_dimension_name(name):
+    nowhitespace = str(name).strip().replace(' ', '')
+    noleadunderscores = nowhitespace.lstrip('_')
+    noquotes = noleadunderscores.replace('\"', '_').replace('\'', '_')
+    nottoolong = noquotes[:128]
+    return nottoolong
+
+
+def fix_dimension_value(value):
+    nowhitespace = str(value).strip()
+    noquotes = nowhitespace.replace('\"', '_').replace('\'', '_')
+    nottoolong = noquotes[:256]
+    return nottoolong
+
+
+def send_to_splunk_o11y(events):
+    """
+    Sends the transformed events to the Splunk Observability ingest endpoint.
+    :param events: list of events in Splunk Observability format
+    :return: None
+    """
+
+    if is_forwarding is False:
+        logging.getLogger().debug("Splunk Observability forwarding is disabled - nothing sent")
+        return
+
+    # create a session and adapter so a single connection pool is reused
+    # rather than opening a new connection for each POST call
+
+    session = requests.Session()
+    try:
+        adapter = requests.adapters.HTTPAdapter(pool_connections=10, pool_maxsize=10)
+        session.mount('https://', adapter)
+
+        api_headers = {'Content-Type': 'application/json', 'X-SF-Token': api_token}
+        sorted_events = sorted(events, key=lambda dp: dp['timestamp'])
+        message_body = {'gauge': sorted_events}
+        logging.getLogger().debug("json to splunk observability: {}".format(json.dumps(message_body)))
+        logging.getLogger().debug("headers to splunk observability: {}".format(json.dumps(api_headers)))
+        post_url = 'https://ingest.%s.signalfx.com/v2/datapoint' % api_realm
+        logging.getLogger().debug("post to splunk observability: {}".format(post_url))
+        resp = session.post(post_url, data=json.dumps(message_body), headers=api_headers)
+
+        if resp.status_code != 200:
+            raise Exception('error {} sending to Splunk Observability: {}'.format(resp.status_code, resp.reason))
+
+    finally:
+        session.close()
+
+
+def get_dictionary_value(dictionary: dict, target_key: str):
+    """
+    Recursive method to find a value within a dictionary that may also contain nested lists / dictionaries.
+    :param dictionary: the dictionary to scan
+    :param target_key: the key we are looking for
+    :return: the first value found for target_key (if the key exists multiple times, the first match wins).
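+    Returns None when the key is not present anywhere. Note that the truthiness
+    checks below also treat falsy values (0, "", False) as not found.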
+    """
+
+    if dictionary is None:
+        raise Exception('dictionary None for key: {}'.format(target_key))
+
+    target_value = dictionary.get(target_key)
+    if target_value:
+        return target_value
+
+    for _, value in dictionary.items():
+        if isinstance(value, dict):
+            target_value = get_dictionary_value(dictionary=value, target_key=target_key)
+            if target_value:
+                return target_value
+
+        elif isinstance(value, list):
+            for entry in value:
+                if isinstance(entry, dict):
+                    target_value = get_dictionary_value(dictionary=entry, target_key=target_key)
+                    if target_value:
+                        return target_value
+
+
+def local_test_mode(filename):
+    """
+    This routine reads a local JSON metrics file, converting the contents to Splunk Observability format.
+    :param filename: metric events JSON file exported from the OCI Logging UI or CLI.
+    :return: None
+    """
+
+    logging.getLogger().info("local testing started")
+
+    with open(filename, 'r') as f:
+        transformed_results = list()
+
+        for line in f:
+            event = json.loads(line)
+            logging.getLogger().debug(json.dumps(event, indent=4))
+            transformed_result = transform_metric_to_splunk_o11y_format_list(event)
+            transformed_results.extend(transformed_result)
+
+        logging.getLogger().debug(json.dumps(transformed_results, indent=4))
+        send_to_splunk_o11y(events=transformed_results)
+
+    logging.getLogger().info("local testing completed")
+
+
+"""
+Local Debugging
+"""
+
+if __name__ == "__main__":
+    local_test_mode('oci-metrics-test-file.json')
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/func.yaml b/samples/oci-monitoring-metrics-to-splunk-observability-python/func.yaml
new file mode 100644
index 0000000..95d36fa
--- /dev/null
+++ b/samples/oci-monitoring-metrics-to-splunk-observability-python/func.yaml
@@ -0,0 +1,8 @@
+schema_version: 20180708
+name: oci-monitoring-metrics-to-splunk-o11y-python
+version: 0.0.1
+runtime: python
+build_image: fnproject/python:3.9-dev
+run_image: fnproject/python:3.9
+entrypoint: /python/bin/fdk /function/func.py handler
+memory: 256
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/images/o11y-chart.png b/samples/oci-monitoring-metrics-to-splunk-observability-python/images/o11y-chart.png
new file mode 100644
index 0000000..3eb370c
Binary files /dev/null and b/samples/oci-monitoring-metrics-to-splunk-observability-python/images/o11y-chart.png differ
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/images/o11y-metric-finder.png b/samples/oci-monitoring-metrics-to-splunk-observability-python/images/o11y-metric-finder.png
new file mode 100644
index 0000000..a6c49b6
Binary files /dev/null and b/samples/oci-monitoring-metrics-to-splunk-observability-python/images/o11y-metric-finder.png differ
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/images/sch-setup.png b/samples/oci-monitoring-metrics-to-splunk-observability-python/images/sch-setup.png
new file mode 100644
index 0000000..2e2a818
Binary files /dev/null and b/samples/oci-monitoring-metrics-to-splunk-observability-python/images/sch-setup.png differ
diff --git a/samples/oci-monitoring-metrics-to-splunk-observability-python/requirements.txt b/samples/oci-monitoring-metrics-to-splunk-observability-python/requirements.txt
new file mode 100644
index 0000000..bd7dccf
--- /dev/null
+++ b/samples/oci-monitoring-metrics-to-splunk-observability-python/requirements.txt
@@ -0,0 +1,3 @@
+oci
+requests
+fdk