diff --git a/README.md b/README.md
index 002c5b7c..7a97af2e 100644
--- a/README.md
+++ b/README.md
@@ -65,9 +65,9 @@ First, copy `.env.sample` file to `.env`, and ensure the configuration values ar
 | Name | Value | Description |
 |------|-------|-------------|
-|`REACT_APP_API_ROOT`| | The root URL for the STAC API, either prod, staging or a local instance. If the URL ends in 'stac', this is a special case that is handled by replacing 'stac' with the target service, e.g. 'data' or 'sas'
+|`REACT_APP_API_ROOT`| | The root URL for the STAC API, either prod, staging, or a local instance using . If the URL ends in 'stac', this is a special case that is handled by replacing 'stac' with the target service, e.g. 'data' or 'sas'
 |`REACT_APP_TILER_ROOT`| Optional | The root URL for the data tiler API, if not hosted from the domain of the STAC API.
-|`REACT_APP_IMAGE_API_ROOT`| PC APIs pcfunc endpoint | The root URL for the image data API for animations.
+|`REACT_APP_IMAGE_API_ROOT`| PC APIs pcfunc endpoint | The root URL for the image data API for animations. For a local instance use .
 |`REACT_APP_AZMAPS_CLIENT_ID`| Retrieve from Azure Portal | The Client ID used to authenticate against Azure Maps.
 |`REACT_APP_ONEDS_TENANT_KEY`| Lookup at | Telemetry key (not needed for dev)
 |`REACT_APP_AUTH_URL`| Optional. URL to root pc-session-api instance | Used to enable login work.
@@ -94,7 +94,7 @@ Note, you may need to assign this role via an identity that has JIT admin privil
 #### Developing against local STAC assets
 
 The `REACT_APP_API_ROOT` can be set to a local instance of the Metadata API if you are
-prototyping changes to collections. However, as a shortcut, you can also run the
+prototyping changes to collections, e.g., . However, as a shortcut, you can also run the
 `./scripts/mockstac` script in order to locally serve a static json file from
 `/mockstac/collections`. Simply alter the contents of the JSON file as you need,
 and set your `REACT_APP_API_ROOT` value to `http://localhost:8866` and restart
@@ -133,14 +133,16 @@ to format all files. The CI system will check for formatting errors.
 
 If you're on WSL2, be sure to set up your system to run the Cypress GUI:
 
-You may also need to install cypress locally on your computer, with `npm install cypress`.
+You may also need to install Cypress locally on your computer, with `npm install cypress` and `./node_modules/.bin/cypress install`.
 
 - Install Google Chrome in your WSL2 environment (Cypress ships with a chromium-based electron browser)
-- Run `npm cypress:open` to run the GUI and debug tests, or
-- Run `npm cypress:run` to run the headless version in the terminal
+- Run `npm run cypress:open` to run the GUI and debug tests, or
+- Run `npm run cypress:run` to run the headless version in the terminal
 
 Both test suites are run from CI.
 
+Note: the Cypress tests currently rely on the sentinel-2-l2a collection, but a locally run backend only ships with naip out of the box, so the tests will fail against it.
+
 ## Ports
 
 | Service | Port |
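The "ends in 'stac'" special case described in the `REACT_APP_API_ROOT` row above can be sketched as follows. This is an illustration, not the repository's actual implementation — the helper name and the example URL shape are assumptions:

```ts
// Hypothetical sketch of the README's "URL ends in 'stac'" rule: derive the
// root for a sibling service by swapping out the trailing "stac" segment.
function serviceRoot(apiRoot: string, service: "data" | "sas"): string {
  return apiRoot.endsWith("stac")
    ? apiRoot.slice(0, -"stac".length) + service
    : apiRoot;
}

// e.g. ".../api/stac" -> ".../api/data"; any other root is returned unchanged.
console.log(serviceRoot("https://planetarycomputer.microsoft.com/api/stac", "data"));
```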
diff --git a/cypress/e2e/explorer/url_state.cy.js b/cypress/e2e/explorer/url_state.cy.js
index c0f2c559..02d84deb 100644
--- a/cypress/e2e/explorer/url_state.cy.js
+++ b/cypress/e2e/explorer/url_state.cy.js
@@ -88,9 +88,7 @@ describe("URL state is loaded to Explorer", () => {
     cy.get("[title='Show oldest results first']").should("have.class", "is-checked");
   });
 
-  // There is a problem with /mosaic//info path, likely caused by update
-  // of titiler. https://github.com/microsoft/PlanetaryComputerDataCatalog/issues/476
-  it.skip("can specify a custom searchid", () => {
+  it("can specify a custom searchid", () => {
     cy.intercept("/api/stac/v1/collections/sentinel-2-l2a").as("getS2");
     cy.intercept("/api/data/v1/mosaic/info?collection=sentinel-2-l2a").as(
       "getS2mosaic"
diff --git a/docs/concepts/hub-deployment.md b/docs/concepts/hub-deployment.md
index 407f0029..adc470f9 100644
--- a/docs/concepts/hub-deployment.md
+++ b/docs/concepts/hub-deployment.md
@@ -10,7 +10,7 @@ In this guide you will:
 * Deploy an [AKS] cluster using the Azure CLI
 * Deploy JupyterHub and Dask Gateway using the [daskhub] Helm chart.
 
-We describe two deployment scenarios, a [simple](docs/concepts/hub-deployment/#simple-deployment) and a [recommended](docs/concepts/hub-deployment/#recommended-deployment) deployment. If you're new to Azure, Kubernetes, or JupyterHub, then you should try the simple deployment to verify that the basics work, before moving on to the more advanced recommended deployment. Finally, the configuration for the Planetary Computer based JupyeteHub is available on [GitHub](https://github.com/microsoft/planetary-computer-hub), which provides a reference for a real-world deployment.
+We describe two deployment scenarios, a [simple](docs/concepts/hub-deployment/#simple-deployment) and a [recommended](docs/concepts/hub-deployment/#recommended-deployment) deployment. If you're new to Azure, Kubernetes, or JupyterHub, then you should try the simple deployment to verify that the basics work, before moving on to the more advanced recommended deployment. Finally, the configuration for the Planetary Computer based JupyterHub is available on [GitHub](https://github.com/microsoft/planetary-computer-hub), which provides a reference for a real-world deployment.
 
 For background, we recommend reading the [Zero to JupyterHub with Kubernetes][z2jh] guide and the [Dask Gateway on Kubernetes][gateway-k8s] documentation.
diff --git a/docs/concepts/sas.md b/docs/concepts/sas.md
index f115cff2..15710d35 100644
--- a/docs/concepts/sas.md
+++ b/docs/concepts/sas.md
@@ -8,15 +8,17 @@ The Data Authentication API enables users to create access tokens that can be us
 A SAS token is needed whenever you want to access Planetary Computer data at an Azure Blob URL. For example, an Azure Blob URL looks like:
 
-<https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/141/045/LC08_L2SP_141045_20210329_20210402_02_T1/LC08_L2SP_141045_20210329_20210402_02_T1_SR_B1.TIF>
+```
+https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/141/045/LC08_L2SP_141045_20210329_20210402_02_T1/LC08_L2SP_141045_20210329_20210402_02_T1_SR_B1.TIF
+```
 
 ### Requesting a token
 
+The `token` endpoint allows for the generation of a SAS token for a given dataset identified by its STAC collection ID. If you know the Azure Blob storage account and container where the data is located, you can also use the endpoint that takes that information in its path:
+
 > [https://planetarycomputer.microsoft.com/api/sas/v1/token/{collection_id}](../reference/sas.md)
 
 > [https://planetarycomputer.microsoft.com/api/sas/v1/token/{storage_account}/{container}](../reference/sas.md)
 
-The `token` endpoint allows for the generation of a SAS token for a given dataset identified by it's STAC collection ID. If you know the Azure Blob storage account and container where the data is located, you can also use the endpoint that takes that information in its path.
-
 The token generated with these endpoints can then be used for all requests for that same dataset. For example, to obtain a SAS token for the `naip` dataset, a request can be made to: <https://planetarycomputer.microsoft.com/api/sas/v1/token/naip>.
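Fetching and applying such a token from code might look like the sketch below. This is not from the docs being patched; in particular, the `token` response field is an assumption about the response shape:

```ts
// Sketch: request a SAS token for the `naip` collection and append it to a
// blob URL. Assumes the JSON response carries the signed query string in a
// `token` field; adjust if the actual response shape differs.
async function signBlobUrl(blobUrl: string): Promise<string> {
  const res = await fetch(
    "https://planetarycomputer.microsoft.com/api/sas/v1/token/naip"
  );
  const { token } = (await res.json()) as { token: string };
  return `${blobUrl}?${token}`; // a SAS token rides along as the URL query string
}
```

Opening the same `token/naip` URL in a browser returns the raw JSON response, which is what the docs text below walks through.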
 If you click on that link, you should see something like:
 
 ```json
diff --git a/docs/overview/about.md b/docs/overview/about.md
index e341aa62..6496388c 100644
--- a/docs/overview/about.md
+++ b/docs/overview/about.md
@@ -2,22 +2,25 @@
 The Microsoft Planetary Computer is a platform that lets users leverage the power of the cloud to accelerate environmental sustainability and Earth science.
 
-The Planetary Computer consists of four major components:
+The Planetary Computer consists of three major components:
 
 - The [Data Catalog](https://planetarycomputer.microsoft.com/catalog), which includes petabytes of data about Earth systems, hosted on Azure and made available to users for free.
-- [APIs](../concepts/stac.md) that allow users to search for the data they need across space and time.
+- [APIs](../reference/stac.md) that allow users to search for the data they need across space and time.
 - [Applications](https://planetarycomputer.microsoft.com/applications), built by our network of partners, that put the Planetary Computer platform to work for environmental sustainability.
 
 ## Built on Open
 
 The Planetary Computer uses open source tools and supports open standards. In fact, the foundation of the Planetary Computer is the incredible ecosystem of tools being developed in the open by our partners and the much broader open source community. For example, our API builds on the work done by the [STAC](https://stacspec.org/) community to streamline and standardize the cataloging and discovery of geospatial data.
 
-Many of the Planetary Computer components are also open-source. These provide guidance on how to tie together open-source libraries on Azure for geospatial and environmental data analysis.
+Many of the Planetary Computer components are also open-source. These provide guidance on how to tie together open-source libraries on Azure for geospatial and environmental data analysis. The various Planetary Computer GitHub repositories are listed below:
 
 | GitHub repository | Purpose |
 |-------------------|---------|
 | [Microsoft/planetary-computer-apis](https://github.com/Microsoft/planetary-computer-apis) | Deploys the [STAC](https://planetarycomputer.microsoft.com/docs/reference/stac/) and [data](https://planetarycomputer.microsoft.com/docs/reference/data/) APIs |
 | [Microsoft/PlanetaryComputerExamples](https://github.com/microsoft/planetarycomputerexamples) | Contains notebooks with examples for each dataset, quickstarts, and tutorials for using the Planetary Computer |
+| [Microsoft/PlanetaryComputerDataCatalog](https://github.com/microsoft/PlanetaryComputerDataCatalog) | Contains homepage, data catalog, docs, and visualizations for the Planetary Computer |
+| [Microsoft/PlanetaryComputer](https://github.com/microsoft/PlanetaryComputer) | Hosts issues and discussions that users can use to provide feedback and get help in using the Planetary Computer |
+| [Microsoft/planetary-computer-sdk-for-python](https://github.com/microsoft/planetary-computer-sdk-for-python) | A Python library for interacting with the Planetary Computer |
 
 ## About the Preview
 
@@ -26,8 +29,6 @@ The Planetary Computer data and APIs are publicly accessible and can be used wit
 - The [STAC API](../reference/stac) is public and can be accessed anonymously.
 - Most data can be downloaded anonymously, but will be throttled. See [Reading data from the STAC API](../quickstarts/reading-stac.ipynb) for an introduction and [Using Tokens for Data Access](../concepts/sas) for more background on accessing data.
 
-We're just getting started. Check back for updated documentation and new features!
-
 ```{tip}
 To report issues, ask questions, or engage in community discussions please visit our [GitHub repository](https://github.com/microsoft/PlanetaryComputer).
 ```
@@ -47,7 +48,7 @@ The Planetary Computer is just one component of Microsoft's commitment to enviro
 
 - Learn more about Microsoft's [environmental sustainability program](https://www.microsoft.com/en-us/corporate-responsibility/sustainability).
 - Learn more about the work done by AI for Earth grantees at the [AI for Earth grantee gallery](https://aka.ms/ai4egrantees).
-- Microsoft's [AI for Earth program](https://aka.ms/aiforearth) also provides open source technical resources for conservation work; check out our [machine learning APIs](https://aka.ms/ai4eapis), and read about our tools for [accelerating biodiversity surveys with AI](https://aka.ms/biodiversitysurveys) and [AI-accelerated land cover analysis](https://aka.ms/landcovermapping).
+- Read about our tools for [accelerating biodiversity surveys with AI](https://aka.ms/biodiversitysurveys) and [AI-accelerated land cover analysis](https://aka.ms/landcovermapping).
 
 ## Cite the Planetary Computer
diff --git a/docs/overview/explorer.md b/docs/overview/explorer.md
index 28ea3e05..4f3076bf 100644
--- a/docs/overview/explorer.md
+++ b/docs/overview/explorer.md
@@ -37,7 +37,7 @@ In the catalog popup, you can view datasets by category and filter results again
 ```
 
 Datasets can also be opened in the Explorer directly from their
-[catalog][pc-catalog] page. These buttons will launch the Explorer at a
+[catalog][pc-catalog] page. The "Launch in Explorer" button will launch the Explorer at a
 location that has data availability and an interesting visualization.
 
 ```{image} images/explorer-launch.jpg
@@ -49,12 +49,10 @@ location that has data availability and an interesting visualization.
 After selecting a dataset, a pre-configured query and appropriate rendering
 options are automatically selected to get you started. Many datasets have
 several options for both, while others are best represented by a single query
-and rendering option. Customizing both of these presets is on our roadmap.
-While using the Explorer, the URL in your web browser will contain the relevant
+and rendering option. While using the Explorer, the URL in your web browser will contain the relevant
 selections, and can be bookmarked or shared.
 
-Currently, only datasets with GeoTiff data assets are available in the Explorer,
-but support for additional formats is planned.
+Currently, only datasets with GeoTIFF data assets are available in the Explorer.
 
 ## Viewing results
 
@@ -81,7 +79,7 @@ list, you will return to your original viewport and search results.
 ```
 
 Depending on how restrictive your query is, or the general availability of
-a dataset in a certain area, they may be "gaps" in the map layer. Since this is
+a dataset in a certain area, there may be "gaps" in the map layer. Since this is
 a mosaicked view, that is expected. Similarly, for imagery datasets, the images
 stitched together may be from different days, under different weather
 conditions, and won't necessarily be a seamless mosaic.
@@ -98,8 +96,8 @@ to direct you to the valid extent.
 
 ### Custom filters
 
-Each dataset is preconfigured with a set of filters that can be used to find
-representative items in a dataset quickly. The Explorer also has an Advanced mode
+Each dataset is preconfigured with a set of filters that can be used to quickly find
+representative items in a dataset. The Explorer also has an Advanced mode
 that allows you to customize the filters for your query. To activate Advanced
 mode, simply select the dataset you're interested in and click the "Advanced" link.
 
 ```{image} images/explorer-advanced.jpg
 :class: no-scaled-link
@@ -110,7 +108,7 @@ that allows you to customize the filters for your query. To activate Advanced mo
 
 The custom filter will start with a date-range filter by default, but you can
 click the "Select filters" link to select any number of attributes on the
-dataset to use in a custom filter. Selected attributes will be displayed in a
+dataset to use within your custom filter. Selected attributes will be displayed in a
 custom control relevant to its data type to help ensure that your filter is
 valid and will return results.
@@ -128,7 +126,7 @@ variable, each rendering option will be a single asset with an appropriate
 color map applied. For example, [Harmonized Global Biomass][hgb] has four
 assets, each representing a different measure of stored carbon. Imagery datasets
 on the other hand, list various rendering options that combine bands into well known RGB
-interpretations or expressions, like false color or NDVI. To see a description of the
+interpretations or expressions, like false color or Normalized Difference Vegetation Index (NDVI). To see a description of the
 selected rendering option, click the info button next to the search results.
 
 ```{image} images/explorer-queryinfo.png
@@ -184,15 +182,13 @@ water extent of Lake Powell, UT.
 :class: no-scaled-link
 ```
 
-### Working with results with Python
+### Working with results in Python
 
 Finding and visualizing data items is likely only the first step in your data
 analysis workflow. The Explorer provides two options to move your search
 results into a compute environment. By clicking "Code snippet for search results"
 under the search results, you can generate a Python snippet to recreate your exact
-search covering the map viewport.
-
-Or, if you're interested in working with a single item you've found, you can
+search covering the map viewport. Or, if you're interested in working with a single item you've found, you can
 generate a Python snippet by clicking the "code" (`{ }`) button which will load
 that single item.
 
@@ -200,11 +196,9 @@ Since our data and APIs are accessible to anyone without an account, these
 snippets can be run in any compute environment. Please refer to our [data
 access documentation][pc-docs-sas] for more details.
 
-## Coming soon
-
-We're constantly adding improvements to the Planetary Computer and the Explorer.
+## Feedback
 
-Watch for future announcements, and [visit our GitHub Discussions](https://github.com/microsoft/PlanetaryComputer/discussions) for feedback or questions.
+[Visit our GitHub Discussions](https://github.com/microsoft/PlanetaryComputer/discussions) for feedback or questions.
 
 [1]: https://planetarycomputer.microsoft.com/explore
 [pc-docs-api]: https://planetarycomputer.microsoft.com/docs/quickstarts/reading-stac/
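The snippets the Explorer generates are Python, but the search itself is ordinary STAC API traffic. A rough TypeScript equivalent is sketched below — the collection, bbox, and datetime values are illustrative placeholders, not what the Explorer would actually emit:

```ts
// Illustrative only: recreate an Explorer-style search by POSTing to the
// public STAC search endpoint. All filter values are placeholders.
async function recreateSearch(): Promise<void> {
  const res = await fetch(
    "https://planetarycomputer.microsoft.com/api/stac/v1/search",
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        collections: ["sentinel-2-l2a"],
        bbox: [-111.2, 36.8, -110.8, 37.1], // map viewport, illustrative
        datetime: "2021-06-01/2021-06-30",
        limit: 12,
      }),
    }
  );
  const { features } = await res.json();
  console.log(`${features.length} items returned`);
}
```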
diff --git a/docs/overview/images/explorer-queryinfo.jpg b/docs/overview/images/explorer-queryinfo.jpg
deleted file mode 100644
index 313cdc85..00000000
Binary files a/docs/overview/images/explorer-queryinfo.jpg and /dev/null differ
diff --git a/docs/overview/images/explorer-queryinfo.png b/docs/overview/images/explorer-queryinfo.png
index f3062e37..0e2110d3 100644
Binary files a/docs/overview/images/explorer-queryinfo.png and b/docs/overview/images/explorer-queryinfo.png differ
diff --git a/docs/overview/ui-codespaces.md b/docs/overview/ui-codespaces.md
index 544ecd32..606cf862 100644
--- a/docs/overview/ui-codespaces.md
+++ b/docs/overview/ui-codespaces.md
@@ -2,7 +2,7 @@
 
 [GitHub Codespaces][codespaces] is a development environment that's hosted in the cloud.
 
-The easiest way to get started is to [fork] the [Planetary Computer Examples][examples] repository and create a new Codespace (we recommend [setting a default region](#setting-a-default-region) first).
+The easiest way to get started is to [fork] the [Planetary Computer Examples][examples] repository and create a new Codespace (we recommend [setting a default region](#set-a-default-region) first).
 
 ![Start codespaces from the "Code" dropdown.](images/codespaces-start.png)
 
@@ -10,11 +10,11 @@ This will create a Codespace under your GitHub account that's configured to work
 
 ![A preview of GitHub Codespaces in the browser.](images/codespaces-browser.png)
 
-When you launch a notebook you'll be prompted to select a kernel. Choose the `'notebook': conda` kernel.
+When you launch a notebook, you'll be prompted to select a kernel. Choose the `'notebook': conda` kernel.
 
 ## Set a default region
 
-We recommend that you create your Codespaces in the West Europe. You can do this per-Codespace, or set a [default region][region].
+We recommend that you create your Codespaces in the West Europe region. You can do this per-Codespace, or set a [default region][region].
 
 ![Set the default region](images/codespaces-region.png)
diff --git a/package.json b/package.json
index c9755ebe..4ecf3361 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pc-datacatalog",
-  "version": "2024.9.1",
+  "version": "2024.9.3",
   "private": true,
   "proxy": "http://api:7071/",
   "dependencies": {
@@ -103,4 +103,4 @@
     "prettier": "^2.7.1",
     "yaml-loader": "^0.8.0"
   }
-}
\ No newline at end of file
+}
diff --git a/src/components/Header/styles.ts b/src/components/Header/styles.ts
index 6fe64c6b..9c9d4bf9 100644
--- a/src/components/Header/styles.ts
+++ b/src/components/Header/styles.ts
@@ -35,6 +35,18 @@ export const logoImageStyle = mergeStyles({
   lineHeight: "1",
 });
 
+export const skipContentStyle = mergeStyles({
+  left: "50%",
+  position: "absolute",
+  transform: "translateY(-250%)",
+  padding: "8px",
+  transition: "transform 0.3s",
+
+  "&:focus": {
+    transform: "translateY(0%)",
+  },
+});
+
 export const headerPipeStyle = mergeStyles({
   marginTop: "0px !important",
   marginLeft: "7px !important",
diff --git a/src/pages/Docs/Docs.index.tsx b/src/pages/Docs/Docs.index.tsx
index 3df1922f..6faa53ec 100644
--- a/src/pages/Docs/Docs.index.tsx
+++ b/src/pages/Docs/Docs.index.tsx
@@ -9,6 +9,7 @@ import DocsHtmlContent from "./components/DocsHtmlContent";
 import Topic from "./components/Topic";
 import { DATA_URL, SAS_URL, STAC_URL } from "../../utils/constants";
 import ScrollToTop from "../../components/ScrollToTop";
+import { skipContentStyle } from "components/Header/styles";
 
 const OpenApiSpec = React.lazy(() => import("./components/OpenApiSpec"));
 
@@ -59,6 +60,15 @@ const Docs = () => {
 
   const documentationPane = (
+        onClick={() => {
+          const urlWithoutHash = window.location.href.split("#")[0];
+          window.location.href = urlWithoutHash + "#generated-docs-content";
+        }}
+      >
+        Skip to content
       {tocComponent}
       {breadcrumb}
diff --git a/src/pages/Docs/components/DocsHtmlContent.tsx b/src/pages/Docs/components/DocsHtmlContent.tsx
index dbcf0735..5af1d929 100644
--- a/src/pages/Docs/components/DocsHtmlContent.tsx
+++ b/src/pages/Docs/components/DocsHtmlContent.tsx
@@ -9,12 +9,14 @@ import { a11yPostProcessDom } from "../../../utils";
 interface DocsHtmlContentProps {
   className: string;
   markupJson: any;
+  idText?: string;
 }
 
 const DocsHtmlContent: React.FC<DocsHtmlContentProps> = ({
   className,
   markupJson,
   children,
+  idText,
 }) => {
   const navigate = useNavigate();
   const contentRef = useRef(null);
@@ -114,7 +116,7 @@
     : DOMPurify.sanitize(bodyWithRoutedLinks);
 
   const content = processedMarkup ? (
+    <div id={idText}
       {children}
 
 = ({ topics }) => {
     <>
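Note on the last two files: the JSX tags in the hunks above were lost when this patch was extracted, so those diff bodies are incomplete. Piecing together the surviving lines, the skip-to-content wiring plausibly looks like the sketch below. Only the click-handler body, the "Skip to content" label, `skipContentStyle`, and the `idText` prop are attested by the diff; the element name and attribute layout are assumptions:

```tsx
import React from "react";
import { skipContentStyle } from "components/Header/styles";

// Plausible reconstruction, not the verbatim source: a control at the top of
// the docs pane that stays off-screen until focused (see the skipContentStyle
// diff) and jumps to the content container, which DocsHtmlContent is assumed
// to render as <div id={idText} ...> with idText="generated-docs-content"
// passed from Docs.index.tsx.
export const SkipToContent: React.FC = () => (
  <button
    className={skipContentStyle}
    onClick={() => {
      const urlWithoutHash = window.location.href.split("#")[0];
      window.location.href = urlWithoutHash + "#generated-docs-content";
    }}
  >
    Skip to content
  </button>
);
```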