{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Netbacks Freight Hire Comparison\n", "\n", "This script allows you to plot multiple percentages of freight hire included in a given Netback.\n", "\n", "This script uses elements from our API code samples. If you'd like a more basic and informative example of how to pull data via the Spark API, please visit our Github or API website:\n", "\n", "- Github: https://github.com/spark-commodities/api-code-samples/blob/master/jupyter_notebooks/\n", "- API Website: https://www.sparkcommodities.com/api/code-examples/jupyter.html\n", "\n", "\n", "### Have any questions?\n", "\n", "If you have any questions regarding our API, or need help accessing specific datasets, please contact us at:\n", "\n", "__data@sparkcommodities.com__\n", "\n", "or refer to our API website for more information about this endpoint:\n", "https://www.sparkcommodities.com/api/request/netbacks.html\n", "\n", "__N.B. This script requires a Cargo subscription__" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 1. Importing Data\n", "\n", "Here we define the functions that allow us to retrieve the valid credentials to access the Spark API.\n", "\n", "This section can remain unchanged for most Spark API users." ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import json\n", "import os\n", "import sys\n", "import numpy as np\n", "from base64 import b64encode\n", "from pprint import pprint\n", "from urllib.parse import urljoin\n", "import pandas as pd\n", "import time\n", "import requests\n", "from io import StringIO\n", "\n", "\n", "try:\n", " from urllib import request, parse\n", " from urllib.error import HTTPError\n", "except ImportError:\n", " raise RuntimeError(\"Python 3 required\")\n", "\n", "\n", "\n", "\n", "API_BASE_URL = \"https://api.sparkcommodities.com\"\n", "\n", "\n", "def retrieve_credentials(file_path=None):\n", " \"\"\"\n", " Find credentials either by reading the client_credentials file or reading\n", " environment variables\n", " \"\"\"\n", " if file_path is None:\n", "\n", " client_id = os.getenv(\"SPARK_CLIENT_ID\")\n", " client_secret = os.getenv(\"SPARK_CLIENT_SECRET\")\n", " if not client_id or not client_secret:\n", " raise RuntimeError(\n", " \"SPARK_CLIENT_ID and SPARK_CLIENT_SECRET environment vars required\"\n", " )\n", " else:\n", " # Parse the file\n", " if not os.path.isfile(file_path):\n", " raise RuntimeError(\"The file {} doesn't exist\".format(file_path))\n", "\n", " with open(file_path) as fp:\n", " lines = [l.replace(\"\\n\", \"\") for l in fp.readlines()]\n", "\n", " if lines[0] in (\"clientId,clientSecret\", \"client_id,client_secret\"):\n", " client_id, client_secret = lines[1].split(\",\")\n", " else:\n", " print(\"First line read: '{}'\".format(lines[0]))\n", " raise RuntimeError(\n", " \"The specified file {} doesn't look like to be a Spark API client \"\n", " \"credentials file\".format(file_path)\n", " )\n", "\n", " print(\">>>> Found credentials!\")\n", " print(\n", " \">>>> Client_id={}, client_secret={}****\".format(client_id, client_secret[:5])\n", " )\n", "\n", " return client_id, client_secret\n", "\n", "\n", "def do_api_post_query(uri, body, headers):\n", " url = urljoin(API_BASE_URL, uri)\n", "\n", " data = json.dumps(body).encode(\"utf-8\")\n", "\n", " # HTTP POST request\n", " req = request.Request(url, data=data, headers=headers)\n", " try:\n", " response = request.urlopen(req)\n", " except HTTPError as e:\n", " print(\"HTTP Error: \", 
e.code)\n", " print(e.read())\n", " sys.exit(1)\n", "\n", " resp_content = response.read()\n", "\n", " # The server must return HTTP 201. Raise an error if this is not the case\n", " assert response.status == 201, resp_content\n", "\n", " # The server returned a JSON response\n", " content = json.loads(resp_content)\n", "\n", " return content\n", "\n", "\n", "def do_api_get_query(uri, access_token):\n", " url = urljoin(API_BASE_URL, uri)\n", "\n", " headers = {\n", " \"Authorization\": \"Bearer {}\".format(access_token),\n", " \"Accept\": \"application/json\",\n", " }\n", "\n", " # HTTP POST request\n", " req = request.Request(url, headers=headers)\n", " try:\n", " response = request.urlopen(req)\n", " except HTTPError as e:\n", " print(\"HTTP Error: \", e.code)\n", " print(e.read())\n", " sys.exit(1)\n", "\n", " resp_content = response.read()\n", "\n", " # The server must return HTTP 201. Raise an error if this is not the case\n", " assert response.status == 200, resp_content\n", "\n", " # The server returned a JSON response\n", " content = json.loads(resp_content)\n", "\n", " return content\n", "\n", "\n", "def get_access_token(client_id, client_secret):\n", " \"\"\"\n", " Get a new access_token. Access tokens are the thing that applications use to make\n", " API requests. Access tokens must be kept confidential in storage.\n", "\n", " # Procedure:\n", "\n", " Do a POST query with `grantType` and `scopes` in the body. A basic authorization\n", " HTTP header is required. The \"Basic\" HTTP authentication scheme is defined in\n", " RFC 7617, which transmits credentials as `clientId:clientSecret` pairs, encoded\n", " using base64.\n", " \"\"\"\n", "\n", " # Note: for the sake of this example, we choose to use the Python urllib from the\n", " # standard lib. One should consider using https://requests.readthedocs.io/\n", "\n", " payload = \"{}:{}\".format(client_id, client_secret).encode()\n", " headers = {\n", " \"Authorization\": b64encode(payload).decode(),\n", " \"Accept\": \"application/json\",\n", " \"Content-Type\": \"application/json\",\n", " }\n", " body = {\n", " \"grantType\": \"clientCredentials\",\n", " \"scopes\": \"read:netbacks\"\n", " }\n", "\n", " content = do_api_post_query(uri=\"/oauth/token/\", body=body, headers=headers)\n", "\n", " print(\n", " \">>>> Successfully fetched an access token {}****, valid {} seconds.\".format(\n", " content[\"accessToken\"][:5], content[\"expiresIn\"]\n", " )\n", " )\n", "\n", " return content[\"accessToken\"]\n", "\n", "\n", "def list_netbacks(access_token):\n", " \"\"\"\n", " Fetch available routes. Return contract ticker symbols\n", "\n", " # Procedure:\n", "\n", " Do a GET query to /v1.0/routes/ with a Bearer token authorization HTTP header.\n", " \"\"\"\n", " content = do_api_get_query(uri=\"/v1.0/netbacks/reference-data/\", access_token=access_token)\n", "\n", " print(\">>>> All the routes you can fetch\")\n", " tickers = []\n", " fobPort_names = []\n", "\n", " availablevia = []\n", " for contract in content[\"data\"]['staticData']['fobPorts']:\n", " \n", " #print(contract)\n", " #print(contract[\"uuid\"])\n", " tickers.append(contract[\"uuid\"])\n", " fobPort_names.append(contract['name'])\n", " \n", " availablevia.append(contract['availableViaPoints'])\n", " \n", " reldates = content[\"data\"]['staticData']['sparkReleases']\n", " \n", " dicto1 = content[\"data\"]\n", " \n", " return tickers, fobPort_names, availablevia, reldates, dicto1\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### N.B. 
Credentials\n", "\n", "N.B. You must have downloaded your client credentials CSV file before proceeding. Please refer to the API documentation if you have not dowloaded them already. Instructions for downloading your credentials can be found here:\n", "\n", "https://api.sparkcommodities.com/redoc#section/Authentication/Create-an-Oauth2-Client\n" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ ">>>> Found credentials!\n", ">>>> Client_id=01c23590-ef6c-4a36-8237-c89c3f1a3b2a, client_secret=80763****\n", ">>>> Successfully fetched an access token eyJhb****, valid 604799 seconds.\n", ">>>> All the routes you can fetch\n" ] } ], "source": [ "#client_id, client_secret = retrieve_credentials(\n", "# file_path=\"/tmp/client_credentials.csv\"\n", "#)\n", "\n", "client_id, client_secret = retrieve_credentials(file_path=\"/tmp/client_credentials.csv\")\n", "\n", "# Authenticate:\n", "access_token = get_access_token(client_id, client_secret)\n", "\n", "# Fetch all contracts:\n", "tickers, fobPort_names, availablevia, reldates, dicto1 = list_netbacks(access_token)\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Netbacks Import Base Functions" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "## Defining the function\n", "from typing import Literal\n", "\n", "\n", "def fetch_netback(access_token, ticker, release, via=None, laden=None, ballast=None, percent_hire: Literal[0, 100] = 100):\n", " \"\"\"\n", " For a route, fetch then display the route details\n", "\n", " # Procedure:\n", "\n", " Do GET queries to https://api.sparkcommodities.com/v1.0/routes/{route_uuid}/\n", " with a Bearer token authorization HTTP header.\n", " \"\"\"\n", " \n", " query_params = \"?fob-port={}\".format(ticker)\n", " if release is not None:\n", " query_params += \"&release-date={}\".format(release)\n", " if via is not None:\n", " query_params += \"&via-point={}\".format(via)\n", " if laden is not None:\n", " query_params += \"&laden-congestion-days={}\".format(laden)\n", " if ballast is not None:\n", " query_params += \"&ballast-congestion-days={}\".format(ballast)\n", " if percent_hire in [0,100]:\n", " query_params += \"&percent-hire={}\".format(percent_hire)\n", " \n", " content = do_api_get_query(\n", " uri=\"/v1.0/netbacks/{}\".format(query_params),\n", " access_token=access_token,\n", " )\n", " \n", " my_dict = content['data']\n", "\n", " return my_dict\n", "\n", "\n", "# Define formatting data function\n", "def format_store(available_via, fob_names, tickrs):\n", " dict_store = {\n", " \"Index\": [],\n", " \"Callable Ports\": [],\n", " \"Corresponding Ticker\": [],\n", " \"Available Via\": []\n", " }\n", " \n", " c = 0\n", " for a in available_via:\n", " ## Check which routes have non-empty Netbacks data and save indices\n", " if len(a) != 0:\n", " dict_store['Index'].append(c)\n", "\n", " # Use these indices to retrive the corresponding Netbacks info\n", " dict_store['Callable Ports'].append(fob_names[c])\n", " dict_store['Corresponding Ticker'].append(tickrs[c])\n", " dict_store['Available Via'].append(available_via[c])\n", " c += 1\n", " # Show available Netbacks ports in a DataFrame (with corresponding indices)\n", " dict_df = pd.DataFrame(dict_store)\n", " return dict_df\n", "\n", "def netbacks_history(tick, reldates, my_via=None, laden =None, ballast=None, percent_hire: Literal[0, 100] = 100):\n", "\n", " months = []\n", " nea_outrights = []\n", " nwe_outrights = 
[]\n", " release_date = []\n", " \n", " port = []\n", "\n", " for r in reldates:\n", " try:\n", " my_dict = fetch_netback(access_token, tickers[tick], release=r, via=my_via, laden=laden, ballast=ballast,percent_hire=percent_hire)\n", " \n", "\n", " m = my_dict['netbacks'][0]\n", " \n", " months.append(m['load']['month'])\n", "\n", " nea_outrights.append(float(m['nea']['outright']['usdPerMMBtu']))\n", "\n", " nwe_outrights.append(float(m['nwe']['outright']['usdPerMMBtu']))\n", "\n", " release_date.append(my_dict['releaseDate'])\n", " port.append(fobPort_names[tick])\n", " \n", " except:\n", " print('Bad Date: ' + r)\n", " \n", " # Incorporating sleep function\n", " time.sleep(0.5)\n", " \n", " historical_df = pd.DataFrame({\n", " 'Release Date': release_date,\n", " 'FoB Port': port,\n", " 'Month': months,\n", " 'NEA Outrights': nea_outrights,\n", " 'NWE Outrights': nwe_outrights,\n", " })\n", " \n", " historical_df['Release Date'] = pd.to_datetime(historical_df['Release Date'])\n", " \n", " return historical_df\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Freight Hire Calculations\n", "\n", "As the endpoint only allows for 2 options (0% or 100%) for the freight hire percent parameter, we must calculate the netbacks when the cost included is inbetween.\n", "\n", "Here, we select 50% as well as the two available options. However, these values can be altered as necessary." ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "def calculate_netbacks(my_dict_0, my_dict_100, percent_hire_list):\n", "\n", "\n", " m = pd.merge(my_dict_0,my_dict_100,how='inner',on=['Release Date','FoB Port','Month'],suffixes=(\" 0%\", \" 100%\"))\n", " m['NEA Base Costs'] = m['NEA Outrights 100%'] - m['NEA Outrights 0%']\n", " m['NWE Base Costs'] = m['NWE Outrights 100%'] - m['NWE Outrights 0%']\n", "\n", " for percent_hire in percent_hire_list:\n", " m[f'NEA Outright {percent_hire}%'] = m['NEA Base Costs'] * (percent_hire/100) + m['NEA Outrights 0%']\n", " m[f'NWE Outright {percent_hire}%'] = m['NWE Base Costs'] * (percent_hire/100) + m['NWE Outrights 0%']\n", "\n", " m[f'Arb {percent_hire}%'] = m[f'NEA Outright {percent_hire}%'] - m[f'NWE Outright {percent_hire}%']\n", "\n", "\n", " return m" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# 2. Calling data and sorting\n", "\n", "In this section, we call the data needed for the US Arb via COGH netback for both 0% Freight Hire and 100% Freight Hire." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Calling and calculating Netbacks\n", "\n", "We select the following Netback." 
] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "# Store available netbacks\n", "\n", "available_df = format_store(availablevia,fobPort_names,tickers)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "# Choose route ID, price release date and freight hire\n", "\n", "# Select Route\n", "\n", "via ='cogh'\n", "my_t = 'Sabine Pass'\n", "t = fobPort_names.index(my_t)\n", "\n", "my_rels = reldates[:200]\n", "\n", "percent_hires = [0,50,100]\n", "\n", "my_dict_0 = netbacks_history(t,my_rels,via,percent_hire=0)\n", "my_dict_100 = netbacks_history(t,my_rels,via,percent_hire=100)\n" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "data": { "application/vnd.microsoft.datawrangler.viewer.v0+json": { "columns": [ { "name": "index", "rawType": "int64", "type": "integer" }, { "name": "Release Date", "rawType": "datetime64[ns]", "type": "datetime" }, { "name": "FoB Port", "rawType": "object", "type": "string" }, { "name": "Month", "rawType": "object", "type": "string" }, { "name": "NEA Outrights 0%", "rawType": "float64", "type": "float" }, { "name": "NWE Outrights 0%", "rawType": "float64", "type": "float" }, { "name": "NEA Outrights 100%", "rawType": "float64", "type": "float" }, { "name": "NWE Outrights 100%", "rawType": "float64", "type": "float" }, { "name": "NEA Base Costs", "rawType": "float64", "type": "float" }, { "name": "NWE Base Costs", "rawType": "float64", "type": "float" }, { "name": "NEA Outright 0%", "rawType": "float64", "type": "float" }, { "name": "NWE Outright 0%", "rawType": "float64", "type": "float" }, { "name": "Arb 0%", "rawType": "float64", "type": "float" }, { "name": "NEA Outright 50%", "rawType": "float64", "type": "float" }, { "name": "NWE Outright 50%", "rawType": "float64", "type": "float" }, { "name": "Arb 50%", "rawType": "float64", "type": "float" }, { "name": "NEA Outright 100%", "rawType": "float64", "type": "float" }, { "name": "NWE Outright 100%", "rawType": "float64", "type": "float" }, { "name": "Arb 100%", "rawType": "float64", "type": "float" } ], "conversionMethod": "pd.DataFrame", "ref": "185fb595-99da-4ed3-943a-ddb6af8312bc", "rows": [ [ "0", "2025-03-24 00:00:00", "Sabine Pass", "2025-04", "12.468", "12.482", "11.885", "12.247", "-0.5830000000000002", "-0.23499999999999943", "12.468", "12.482", "-0.013999999999999346", "12.1765", "12.3645", "-0.18799999999999883", "11.885", "12.247", "-0.3620000000000001" ], [ "1", "2025-03-21 00:00:00", "Sabine Pass", "2025-04", "12.447", "12.499", "11.865", "12.261", "-0.581999999999999", "-0.23800000000000132", "12.447", "12.499", "-0.05200000000000138", "12.155999999999999", "12.379999999999999", "-0.2240000000000002", "11.865", "12.261", "-0.395999999999999" ], [ "2", "2025-03-20 00:00:00", "Sabine Pass", "2025-04", "12.599", "12.612", "12.026", "12.386", "-0.5730000000000004", "-0.22600000000000087", "12.599", "12.612", "-0.0129999999999999", "12.3125", "12.498999999999999", "-0.18649999999999878", "12.026", "12.386", "-0.35999999999999943" ], [ "3", "2025-03-19 00:00:00", "Sabine Pass", "2025-04", "12.863", "12.81", "12.334", "12.59", "-0.5289999999999999", "-0.22000000000000064", "12.863", "12.81", "0.05299999999999905", "12.5985", "12.7", "-0.1014999999999997", "12.334", "12.59", "-0.2560000000000002" ], [ "4", "2025-03-18 00:00:00", "Sabine Pass", "2025-04", "12.166", "12.083", "11.695", "11.871", "-0.4710000000000001", "-0.21199999999999974", "12.166", "12.083", "0.08300000000000018", 
"11.9305", "11.977", "-0.046499999999999986", "11.695", "11.871", "-0.17600000000000016" ] ], "shape": { "columns": 18, "rows": 5 } }, "text/html": [ "
\n", " | Release Date | \n", "FoB Port | \n", "Month | \n", "NEA Outrights 0% | \n", "NWE Outrights 0% | \n", "NEA Outrights 100% | \n", "NWE Outrights 100% | \n", "NEA Base Costs | \n", "NWE Base Costs | \n", "NEA Outright 0% | \n", "NWE Outright 0% | \n", "Arb 0% | \n", "NEA Outright 50% | \n", "NWE Outright 50% | \n", "Arb 50% | \n", "NEA Outright 100% | \n", "NWE Outright 100% | \n", "Arb 100% | \n", "
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | \n", "2025-03-24 | \n", "Sabine Pass | \n", "2025-04 | \n", "12.468 | \n", "12.482 | \n", "11.885 | \n", "12.247 | \n", "-0.583 | \n", "-0.235 | \n", "12.468 | \n", "12.482 | \n", "-0.014 | \n", "12.1765 | \n", "12.3645 | \n", "-0.1880 | \n", "11.885 | \n", "12.247 | \n", "-0.362 | \n", "
1 | \n", "2025-03-21 | \n", "Sabine Pass | \n", "2025-04 | \n", "12.447 | \n", "12.499 | \n", "11.865 | \n", "12.261 | \n", "-0.582 | \n", "-0.238 | \n", "12.447 | \n", "12.499 | \n", "-0.052 | \n", "12.1560 | \n", "12.3800 | \n", "-0.2240 | \n", "11.865 | \n", "12.261 | \n", "-0.396 | \n", "
2 | \n", "2025-03-20 | \n", "Sabine Pass | \n", "2025-04 | \n", "12.599 | \n", "12.612 | \n", "12.026 | \n", "12.386 | \n", "-0.573 | \n", "-0.226 | \n", "12.599 | \n", "12.612 | \n", "-0.013 | \n", "12.3125 | \n", "12.4990 | \n", "-0.1865 | \n", "12.026 | \n", "12.386 | \n", "-0.360 | \n", "
3 | \n", "2025-03-19 | \n", "Sabine Pass | \n", "2025-04 | \n", "12.863 | \n", "12.810 | \n", "12.334 | \n", "12.590 | \n", "-0.529 | \n", "-0.220 | \n", "12.863 | \n", "12.810 | \n", "0.053 | \n", "12.5985 | \n", "12.7000 | \n", "-0.1015 | \n", "12.334 | \n", "12.590 | \n", "-0.256 | \n", "
4 | \n", "2025-03-18 | \n", "Sabine Pass | \n", "2025-04 | \n", "12.166 | \n", "12.083 | \n", "11.695 | \n", "11.871 | \n", "-0.471 | \n", "-0.212 | \n", "12.166 | \n", "12.083 | \n", "0.083 | \n", "11.9305 | \n", "11.9770 | \n", "-0.0465 | \n", "11.695 | \n", "11.871 | \n", "-0.176 | \n", "