From e0fdd3f489590168b877a967687026d347b304c9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 2 Jul 2024 15:44:11 +0200 Subject: [PATCH 01/25] Merge release into main branch (#517) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: dependabot[bot] Co-authored-by: Jesús Arroyo Torrens Co-authored-by: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Co-authored-by: Javier Goizueta Co-authored-by: Alberto Hernández Co-authored-by: Pedro-Juan Ferrer Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: vdelacruzb Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 5 +++++ clouds/bigquery/CHANGELOG.md | 4 ++++ clouds/bigquery/version | 2 +- clouds/snowflake/CHANGELOG.md | 4 ++++ clouds/snowflake/libraries/javascript/package.json | 5 ++++- clouds/snowflake/version | 2 +- 6 files changed, 19 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 70682c788..f88bf367b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,11 @@ CARTO Analytics Toolbox Core. All notable commits to this project will be documented in this file. +## 2024-06-27 + +- chore(sf): refactor at snowflake native app to an installer (#512) +- chore(bq|quadbin): optimize polyfill (#513) + ## 2024-05-21 - refactor(sf|h3): avoid memory limit exceeded in H3_POLYFILL_TABLE (#501) diff --git a/clouds/bigquery/CHANGELOG.md b/clouds/bigquery/CHANGELOG.md index 2e7b143db..d25171df8 100644 --- a/clouds/bigquery/CHANGELOG.md +++ b/clouds/bigquery/CHANGELOG.md @@ -4,6 +4,10 @@ CARTO Analytics Toolbox Core for BigQuery. All notable commits to this project will be documented in this file. +## [1.2.3] - 2024-06-27 + +- chore(quadbin): optimize polyfill (#513) + ## [1.2.2] - 2024-04-18 - docs(processing): update voronoi doc (#492) diff --git a/clouds/bigquery/version b/clouds/bigquery/version index 23aa83906..0495c4a88 100644 --- a/clouds/bigquery/version +++ b/clouds/bigquery/version @@ -1 +1 @@ -1.2.2 +1.2.3 diff --git a/clouds/snowflake/CHANGELOG.md b/clouds/snowflake/CHANGELOG.md index 31d31204c..176a981d9 100644 --- a/clouds/snowflake/CHANGELOG.md +++ b/clouds/snowflake/CHANGELOG.md @@ -4,6 +4,10 @@ CARTO Analytics Toolbox Core for Snowflake. All notable commits to this project will be documented in this file. 
+## [1.2.4] - 2024-06-27 + +- chore(sf): refactor at snowflake native app to an installer (#512) + ## [1.2.3] - 2024-05-21 - refactor(h3): avoid memory limit exceeded in H3_POLYFILL_TABLE (#501) diff --git a/clouds/snowflake/libraries/javascript/package.json b/clouds/snowflake/libraries/javascript/package.json index 86fbebf39..9fb6f405f 100644 --- a/clouds/snowflake/libraries/javascript/package.json +++ b/clouds/snowflake/libraries/javascript/package.json @@ -6,12 +6,15 @@ "license": "BSD-3-Clause", "private": true, "dependencies": { - "@math.gl/web-mercator": "^3.6.2", + "@math.gl/web-mercator": "3.6.3", "@mapbox/tile-cover": "3.0.2", "@mapbox/tilebelt": "1.0.2", "long": "4.0.0", "@turf/turf": "6.3.0", "h3-js": "3.7.2", "@placekey/placekey": "1.0.3" + }, + "resolutions": { + "@turf/bbox": "6.5.0" } } diff --git a/clouds/snowflake/version b/clouds/snowflake/version index 0495c4a88..e8ea05db8 100644 --- a/clouds/snowflake/version +++ b/clouds/snowflake/version @@ -1 +1 @@ -1.2.3 +1.2.4 From e1ab4b4128866f3d9e2b27363ba4846aec1f5d56 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jul 2024 15:51:11 +0200 Subject: [PATCH 02/25] chore(deps): bump scipy from 0.12.0 to 0.12.1 in /clouds/redshift/libraries/python (#518) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- clouds/redshift/libraries/python/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clouds/redshift/libraries/python/requirements.txt b/clouds/redshift/libraries/python/requirements.txt index cc61ca38d..da02dd9d3 100644 --- a/clouds/redshift/libraries/python/requirements.txt +++ b/clouds/redshift/libraries/python/requirements.txt @@ -2,6 +2,6 @@ quadbin==0.2.2 geojson==2.5.0 pygc==1.1.0 numpy==1.8.2 -scipy==0.12.0 +scipy==0.12.1 s2sphere==0.2.5 mercantile==1.2.1 From d63d5ff0f29a26286704a6214443c70b976df872 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:14:53 +0200 Subject: [PATCH 03/25] docs(sf): fix native apps installation doc (#519) --- clouds/snowflake/native_app/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/clouds/snowflake/native_app/README.md b/clouds/snowflake/native_app/README.md index 4e2a27fca..36764875d 100644 --- a/clouds/snowflake/native_app/README.md +++ b/clouds/snowflake/native_app/README.md @@ -8,7 +8,7 @@ The CARTO Analytics Toolbox for Snowflake is composed of a set of user-defined f #### Install the Analytics Toolbox -This Native App is an installer so it does not contain the actual Analytics Toolbox functions and procedures. For the sake of documenting the process, we'll will assume a database named CARTO, as well as a schema named CARTO in that database, also we assume the app to be called CARTO_INSTALLER. The next guidelines and examples will assume that in order to simplify the onboarding process. +This Native App is an installer so it does not contain the actual Analytics Toolbox functions and procedures. For the sake of documenting the process, we'll will assume a database named CARTO, as well as a schema named CARTO in that database, also we assume the app to be called CARTO_ANALYTICS_TOOLBOX. The next guidelines and examples will assume that in order to simplify the onboarding process. All the database, schema and user can have a different name, but remember to adapt the code snippets accordingly. 
@@ -26,11 +26,11 @@ CREATE SCHEMA CARTO.CARTO; GRANT ALL ON SCHEMA CARTO.CARTO TO ROLE SYSADMIN; -- Set create function and procedure permissions -GRANT USAGE ON DATABASE CARTO TO APPLICATION CARTO; -GRANT USAGE, CREATE FUNCTION, CREATE PROCEDURE ON SCHEMA CARTO.CARTO TO APPLICATION CARTO; +GRANT USAGE ON DATABASE CARTO TO APPLICATION CARTO_ANALYTICS_TOOLBOX; +GRANT USAGE, CREATE FUNCTION, CREATE PROCEDURE ON SCHEMA CARTO.CARTO TO APPLICATION CARTO_ANALYTICS_TOOLBOX; -- Generate the installer procedure in the specified location -CALL CARTO_INSTALLER.CARTO.GENERATE_INSTALLER('CARTO.CARTO'); +CALL CARTO_ANALYTICS_TOOLBOX.CARTO.GENERATE_INSTALLER('CARTO.CARTO'); -- Update ownership of the install procedure GRANT OWNERSHIP ON PROCEDURE CARTO.CARTO.INSTALL(STRING, STRING) TO ROLE ACCOUNTADMIN REVOKE CURRENT GRANTS; @@ -44,7 +44,7 @@ GRANT USAGE ON FUTURE FUNCTIONS IN SCHEMA CARTO.CARTO TO ROLE PUBLIC; GRANT USAGE ON FUTURE PROCEDURES IN SCHEMA CARTO.CARTO TO ROLE PUBLIC; -- Install the Analytics Toolbox in CARTO.CARTO -CALL CARTO.CARTO.INSTALL('CARTO_INSTALLER', 'CARTO.CARTO'); +CALL CARTO.CARTO.INSTALL('CARTO_ANALYTICS_TOOLBOX', 'CARTO.CARTO'); ``` ### Usage Examples From 501e3730d32d58a2e948e86f9c17961e71bb14aa Mon Sep 17 00:00:00 2001 From: DeanSherwin Date: Tue, 20 Aug 2024 10:04:10 +0200 Subject: [PATCH 04/25] fix(pg): lock numpy to v1.24.4 until pandas supports 2.X.X (#520) --- clouds/postgres/common/python3_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clouds/postgres/common/python3_requirements.txt b/clouds/postgres/common/python3_requirements.txt index ab1c68abe..b97632d9f 100644 --- a/clouds/postgres/common/python3_requirements.txt +++ b/clouds/postgres/common/python3_requirements.txt @@ -14,4 +14,4 @@ pandas==1.3.2 SQLAlchemy==1.4.23 mapbox-vector-tile==1.2.1 sqlfluff==1.3.1 - +numpy==1.24.4 From 259895d5d78f10cb3a7d05ec39bab823fec22cee Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 18:25:21 +0200 Subject: [PATCH 05/25] Merge release into main branch (#522) --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f88bf367b..ad0ccff19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,12 @@ CARTO Analytics Toolbox Core. All notable commits to this project will be documented in this file. 
+## 2024-08-22 + +- chore(rs): bump scipy from 0.12.0 to 0.12.1 in /clouds/redshift/libraries/python (#518) +- docs(sf): fix native apps installation doc (#519) +- fix(pg): lock numpy to v1.24.4 until pandas supports 2.X.X (#520) + ## 2024-06-27 - chore(sf): refactor at snowflake native app to an installer (#512) From 874ffae11631e46d921919e451b4233529eee937 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Olaya?= Date: Tue, 3 Sep 2024 12:53:35 +0200 Subject: [PATCH 06/25] feat(sf): added warehouse option for SF (#524) --- clouds/snowflake/.env.template | 1 + clouds/snowflake/common/run-query.js | 3 ++- clouds/snowflake/common/run-script.js | 3 ++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/clouds/snowflake/.env.template b/clouds/snowflake/.env.template index 0bfa4262b..967be5aa0 100644 --- a/clouds/snowflake/.env.template +++ b/clouds/snowflake/.env.template @@ -13,3 +13,4 @@ SF_USER= SF_PASSWORD= SF_ROLE= # optional SF_SHARE= # optional +SF_WAREHOUSE= # optional diff --git a/clouds/snowflake/common/run-query.js b/clouds/snowflake/common/run-query.js index e5274e112..f51b92a62 100755 --- a/clouds/snowflake/common/run-query.js +++ b/clouds/snowflake/common/run-query.js @@ -8,7 +8,8 @@ const connection = snowflake.createConnection({ account: process.env.SF_ACCOUNT, username: process.env.SF_USER, password: process.env.SF_PASSWORD, - role: process.env.SF_ROLE + role: process.env.SF_ROLE, + warehouse: process.env.SF_WAREHOUSE, }); connection.connect((err) => { diff --git a/clouds/snowflake/common/run-script.js b/clouds/snowflake/common/run-script.js index 902808742..d3203d4ec 100755 --- a/clouds/snowflake/common/run-script.js +++ b/clouds/snowflake/common/run-script.js @@ -18,7 +18,8 @@ const connection = snowflake.createConnection({ account: process.env.SF_ACCOUNT, username: process.env.SF_USER, password: process.env.SF_PASSWORD, - role: process.env.SF_ROLE + role: process.env.SF_ROLE, + warehouse: process.env.SF_WAREHOUSE, }); connection.connect((err) => { From 775fb874e85e627aff0e92564c86ae691356a098 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Thu, 5 Sep 2024 11:56:48 +0200 Subject: [PATCH 07/25] chore(bq): increse jest timeout to 30000 (#525) --- clouds/bigquery/modules/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clouds/bigquery/modules/Makefile b/clouds/bigquery/modules/Makefile index 521fe1ce2..61892d7b8 100644 --- a/clouds/bigquery/modules/Makefile +++ b/clouds/bigquery/modules/Makefile @@ -85,7 +85,7 @@ test: check $(NODE_MODULES_DEV) if [ ! 
-z "$$TESTS" ]; then \ GOOGLE_APPLICATION_CREDENTIALS=$(GOOGLE_APPLICATION_CREDENTIALS) \ PATH="$(NODE_MODULES_DEV)/.bin/:$(PATH)" \ - jest --testTimeout=250000 $(BAIL) --verbose --slowTestThreshold=20 --maxConcurrency=10 $$TESTS \ + jest --testTimeout=300000 $(BAIL) --verbose --slowTestThreshold=20 --maxConcurrency=10 $$TESTS \ --setupFilesAfterEnv "$(COMMON_DIR)/test-extend.js" || exit 1; \ OLD_TEST=$(TEST_DIR)/$$m/old-test; \ if [ -d $$OLD_TEST ]; then \ From 4a2376e34933d7e5e1f415559894d58b0155cfab Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Wed, 18 Sep 2024 11:46:06 +0200 Subject: [PATCH 08/25] docs(sf): add docs on how to update the analytics toolbox from a native app (#527) --- clouds/snowflake/native_app/README.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/clouds/snowflake/native_app/README.md b/clouds/snowflake/native_app/README.md index 36764875d..2a35ba16b 100644 --- a/clouds/snowflake/native_app/README.md +++ b/clouds/snowflake/native_app/README.md @@ -47,6 +47,26 @@ GRANT USAGE ON FUTURE PROCEDURES IN SCHEMA CARTO.CARTO TO ROLE PUBLIC; CALL CARTO.CARTO.INSTALL('CARTO_ANALYTICS_TOOLBOX', 'CARTO.CARTO'); ``` +##### * Update the Analytics Toolbox + +When a new package of the Analytics Toolbox gets released, the Analytics Toolbox installer gets updated automatically. As in the first installation most of the required permissions were already set, only the next queries should be necessary in order to update the Analytics Toolbox. + +``` +-- Set admin permissions +USE ROLE ACCOUNTADMIN; + +-- Generate the installer procedure in the specified location +CALL CARTO_ANALYTICS_TOOLBOX.CARTO.GENERATE_INSTALLER('CARTO.CARTO'); + +-- Update ownership of the install procedure +GRANT OWNERSHIP ON PROCEDURE CARTO.CARTO.INSTALL(STRING, STRING) TO ROLE ACCOUNTADMIN REVOKE CURRENT GRANTS; + +-- Install the Analytics Toolbox in CARTO.CARTO +CALL CARTO.CARTO.INSTALL('CARTO_ANALYTICS_TOOLBOX', 'CARTO.CARTO'); +``` + +If your Analytics Toolbox doesn't get updated properly please try to drop the app, get it back from Snowflake Marketplace and follow the Step 1. + ### Usage Examples Please refer to CARTO's [SQL reference](https://docs.carto.com/data-and-analysis/analytics-toolbox-for-snowflake/sql-reference) to find the full list of available functions and procedures as well as examples. 
From 05a4bb14c04355764a78713fa690cf2b40ab47f2 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Fri, 20 Sep 2024 15:02:50 +0200 Subject: [PATCH 09/25] chore(sf): update python version on stored procedures from 3.8 to 3.9 (#528) --- clouds/snowflake/native_app/SETUP_SCRIPT.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clouds/snowflake/native_app/SETUP_SCRIPT.sql b/clouds/snowflake/native_app/SETUP_SCRIPT.sql index 967e3204d..9983f186f 100644 --- a/clouds/snowflake/native_app/SETUP_SCRIPT.sql +++ b/clouds/snowflake/native_app/SETUP_SCRIPT.sql @@ -1,7 +1,7 @@ CREATE OR REPLACE PROCEDURE @@SF_APP_SCHEMA@@.GET_MODULES_SQL_FROM_STAGE() returns string language python - runtime_version = '3.8' + RUNTIME_VERSION = '3.9' packages = ('snowflake-snowpark-python') imports = ( '/get_modules_sql_from_stage.py', From 1dfc364771d892000bf7b701576b99cceb123f10 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 11:03:25 +0200 Subject: [PATCH 10/25] Merge release into main branch (#530) --- CHANGELOG.md | 7 +++++++ clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql | 4 ++-- clouds/snowflake/CHANGELOG.md | 8 +++++++- clouds/snowflake/README.md | 1 + clouds/snowflake/common/run-query.js | 2 +- clouds/snowflake/common/run-script.js | 2 +- .../snowflake/modules/sql/quadbin/QUADBIN_FROMLONGLAT.sql | 1 - clouds/snowflake/modules/sql/quadbin/QUADBIN_TOPARENT.sql | 2 +- clouds/snowflake/version | 2 +- 9 files changed, 21 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ad0ccff19..8000d888a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ CARTO Analytics Toolbox Core. All notable commits to this project will be documented in this file. +## 2024-09-23 + +- feat(sf): added warehouse option for SF (#524) +- chore(bq): increse jest timeout to 30000 (#525) +- docs(sf): add docs on how to update the analytics toolbox from a native app (#527) +- chore(sf): update python version on stored procedures from 3.8 to 3.9 (#528) + ## 2024-08-22 - chore(rs): bump scipy from 0.12.0 to 0.12.1 in /clouds/redshift/libraries/python (#518) diff --git a/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql b/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql index 93b69f8b9..9372273b8 100644 --- a/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql +++ b/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql @@ -76,9 +76,9 @@ RETURNS INT64 AS (( WITH __geog_area AS ( SELECT + 508164597540055.75 AS q0_area, ST_AREA(geog) AS geog_area, - COS(ST_Y(ST_CENTROID(geog)) * ACOS(-1) / 180) AS cos_geog_lat, - 508164597540055.75 AS q0_area + COS(ST_Y(ST_CENTROID(geog)) * ACOS(-1) / 180) AS cos_geog_lat ) -- compute the resolution of cells that match the geog area SELECT IF(geog_area > 0 AND cos_geog_lat > 0, diff --git a/clouds/snowflake/CHANGELOG.md b/clouds/snowflake/CHANGELOG.md index 176a981d9..cd3735e94 100644 --- a/clouds/snowflake/CHANGELOG.md +++ b/clouds/snowflake/CHANGELOG.md @@ -4,9 +4,15 @@ CARTO Analytics Toolbox Core for Snowflake. All notable commits to this project will be documented in this file. 
+## [1.2.5] - 2024-09-23 + +- feat: added warehouse option for SF (#524) +- docs: add docs on how to update the analytics toolbox from a native app (#527) +- chore: update python version on stored procedures from 3.8 to 3.9 (#528) + ## [1.2.4] - 2024-06-27 -- chore(sf): refactor at snowflake native app to an installer (#512) +- chore: refactor at snowflake native app to an installer (#512) ## [1.2.3] - 2024-05-21 diff --git a/clouds/snowflake/README.md b/clouds/snowflake/README.md index d87854d61..2cd72ac99 100644 --- a/clouds/snowflake/README.md +++ b/clouds/snowflake/README.md @@ -24,6 +24,7 @@ SF_USER= SF_PASSWORD= SF_ROLE= # optional SF_SHARE= # optional +SF_WAREHOUSE= # optional ``` ## Structure diff --git a/clouds/snowflake/common/run-query.js b/clouds/snowflake/common/run-query.js index f51b92a62..e9c0ef6eb 100755 --- a/clouds/snowflake/common/run-query.js +++ b/clouds/snowflake/common/run-query.js @@ -9,7 +9,7 @@ const connection = snowflake.createConnection({ username: process.env.SF_USER, password: process.env.SF_PASSWORD, role: process.env.SF_ROLE, - warehouse: process.env.SF_WAREHOUSE, + warehouse: process.env.SF_WAREHOUSE }); connection.connect((err) => { diff --git a/clouds/snowflake/common/run-script.js b/clouds/snowflake/common/run-script.js index d3203d4ec..55d015c93 100755 --- a/clouds/snowflake/common/run-script.js +++ b/clouds/snowflake/common/run-script.js @@ -19,7 +19,7 @@ const connection = snowflake.createConnection({ username: process.env.SF_USER, password: process.env.SF_PASSWORD, role: process.env.SF_ROLE, - warehouse: process.env.SF_WAREHOUSE, + warehouse: process.env.SF_WAREHOUSE }); connection.connect((err) => { diff --git a/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMLONGLAT.sql b/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMLONGLAT.sql index e56c62d90..cf4b8ce49 100644 --- a/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMLONGLAT.sql +++ b/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMLONGLAT.sql @@ -53,4 +53,3 @@ AS $$ SELECT @@SF_SCHEMA@@._QUADBIN_STRING_TOINT(@@SF_SCHEMA@@._QUADBIN_FROMZXY(z, x, y)) FROM __zxy $$; - diff --git a/clouds/snowflake/modules/sql/quadbin/QUADBIN_TOPARENT.sql b/clouds/snowflake/modules/sql/quadbin/QUADBIN_TOPARENT.sql index 6e0dcf188..17bcf8990 100644 --- a/clouds/snowflake/modules/sql/quadbin/QUADBIN_TOPARENT.sql +++ b/clouds/snowflake/modules/sql/quadbin/QUADBIN_TOPARENT.sql @@ -47,4 +47,4 @@ AS $$ resolution * 2 ) ) -$$; \ No newline at end of file +$$; diff --git a/clouds/snowflake/version b/clouds/snowflake/version index e8ea05db8..c813fe116 100644 --- a/clouds/snowflake/version +++ b/clouds/snowflake/version @@ -1 +1 @@ -1.2.4 +1.2.5 From c0b02a72a813471ef7c27b20e9cd97960effcacb Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Thu, 26 Sep 2024 12:27:52 +0200 Subject: [PATCH 11/25] chore(bq): fix @google-cloud/bigquery to version 7.9.0 (#531) --- clouds/bigquery/common/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clouds/bigquery/common/package.json b/clouds/bigquery/common/package.json index cb669992e..0343e37c9 100644 --- a/clouds/bigquery/common/package.json +++ b/clouds/bigquery/common/package.json @@ -1,7 +1,7 @@ { "license": "BSD-3-Clause", "devDependencies": { - "@google-cloud/bigquery": "^7.3.0", + "@google-cloud/bigquery": "7.9.0", "@rollup/plugin-commonjs": "^17.1.0", "@rollup/plugin-json": "^4.1.0", "@rollup/plugin-node-resolve": "^13.0.0", From 3bb797a3869b6a2658a37967d9bfc84d4fda1dd1 Mon Sep 17 00:00:00 
2001 From: =?UTF-8?q?V=C3=ADctor=20Olaya?= Date: Wed, 23 Oct 2024 15:03:51 +0200 Subject: [PATCH 12/25] chore(bq,sf,rs,pg|h3,quadbin): added "geo" aliases for certain functions (#526) --- .../modules/doc/h3/H3_FROMGEOGPOINT.md | 2 +- .../doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- .../modules/sql/h3/H3_FROMGEOGPOINT.sql | 15 +++++++-- .../sql/quadbin/QUADBIN_FROMGEOGPOINT.sql | 15 +++++++-- .../modules/test/h3/H3_FROMGEOGPOINT.test.js | 31 +++++++++++++++++ .../quadbin/QUADBIN_FROMGEOGPOINT.test.js | 7 ++++ .../modules/doc/h3/H3_FROMGEOGPOINT.md | 2 +- .../doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- .../modules/sql/h3/H3_FROMGEOGPOINT.sql | 17 ++++++++-- .../sql/quadbin/QUADBIN_FROMGEOGPOINT.sql | 17 ++++++++-- .../modules/test/h3/test_H3_FROMGEOGPOINT.py | 31 +++++++++++++++++ .../quadbin/test_QUADBIN_FROMGEOGPOINT.py | 8 +++++ .../doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- .../sql/quadbin/QUADBIN_FROMGEOGPOINT.sql | 15 +++++++-- .../quadbin/test_QUADBIN_FROMGEOGPOINT.py | 9 +++++ .../modules/doc/h3/H3_FROMGEOGPOINT.md | 2 +- .../doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- .../modules/sql/h3/H3_FROMGEOGPOINT.sql | 8 +++++ .../sql/quadbin/QUADBIN_FROMGEOGPOINT.sql | 14 ++++++-- ...POINT.spec.js => H3_FROMGEOGPOINT.test.js} | 33 +++++++++++++++++++ ...LONGLAT.spec.js => H3_FROMLONGLAT.test.js} | 0 .../quadbin/QUADBIN_FROMGEOGPOINT.test.js | 7 ++++ 22 files changed, 216 insertions(+), 25 deletions(-) rename clouds/snowflake/modules/test/h3/{H3_FROMGEOGPOINT.spec.js => H3_FROMGEOGPOINT.test.js} (62%) rename clouds/snowflake/modules/test/h3/{H3_FROMLONGLAT.spec.js => H3_FROMLONGLAT.test.js} (100%) diff --git a/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md b/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md index 63bd18b66..58d018d52 100644 --- a/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md +++ b/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ H3_FROMGEOGPOINT(point, resolution) **Description** -Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). +Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. * `point`: `GEOGRAPHY` point to get the H3 cell from. * `resolution`: `INT64` number between 0 and 15 with the [H3 resolution](https://h3geo.org/docs/core-library/restable). diff --git a/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index 59799e32f..0f8496e6a 100644 --- a/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. +Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOGRAPHY` point to get the Quadbin from. * `resolution`: `INT64` level of detail or zoom. 
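Editor's note: the new `*_FROMGEOPOINT` names introduced in this patch are thin aliases that delegate to the existing `*_FROMGEOGPOINT` functions, so both spellings should return the same index. A minimal BigQuery sketch of that equivalence follows; the dataset name `carto` is an assumption standing in for the `@@BQ_DATASET@@` placeholder used in these files.

```sql
-- The "geo" alias delegates to the original "geog" function, so both columns
-- should hold the same Quadbin index.
-- Dataset name `carto` is assumed for the @@BQ_DATASET@@ placeholder.
SELECT
  `carto.QUADBIN_FROMGEOGPOINT`(ST_GEOGPOINT(-3.7038, 40.4168), 10) AS original,
  `carto.QUADBIN_FROMGEOPOINT`(ST_GEOGPOINT(-3.7038, 40.4168), 10)  AS alias;
```
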
diff --git a/clouds/bigquery/modules/sql/h3/H3_FROMGEOGPOINT.sql b/clouds/bigquery/modules/sql/h3/H3_FROMGEOGPOINT.sql index 12ad026a3..f6725b114 100644 --- a/clouds/bigquery/modules/sql/h3/H3_FROMGEOGPOINT.sql +++ b/clouds/bigquery/modules/sql/h3/H3_FROMGEOGPOINT.sql @@ -1,6 +1,6 @@ ----------------------------- --- Copyright (C) 2021 CARTO ----------------------------- +-------------------------------- +-- Copyright (C) 2021-2024 CARTO +-------------------------------- CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.H3_FROMGEOGPOINT` (geog GEOGRAPHY, resolution INT64) @@ -10,3 +10,12 @@ AS ( SAFE.ST_X(geog), SAFE.ST_Y(geog), resolution ) ); + +CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.H3_FROMGEOPOINT` +(geo GEOGRAPHY, resolution INT64) +RETURNS STRING +AS ( + `@@BQ_DATASET@@.H3_FROMGEOGPOINT`( + geo, resolution + ) +); diff --git a/clouds/bigquery/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql b/clouds/bigquery/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql index 53cf9ecd1..94eaa4570 100644 --- a/clouds/bigquery/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql +++ b/clouds/bigquery/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql @@ -1,6 +1,6 @@ ----------------------------- --- Copyright (C) 2022 CARTO ----------------------------- +-------------------------------- +-- Copyright (C) 2022-2024 CARTO +-------------------------------- CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.QUADBIN_FROMGEOGPOINT` (point GEOGRAPHY, resolution INT64) @@ -10,3 +10,12 @@ AS ( ST_X(point), ST_Y(point), resolution ) ); + +CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.QUADBIN_FROMGEOPOINT` +(point GEOGRAPHY, resolution INT64) +RETURNS INT64 +AS ( + `@@BQ_DATASET@@.QUADBIN_FROMGEOGPOINT`( + point, resolution + ) +); diff --git a/clouds/bigquery/modules/test/h3/H3_FROMGEOGPOINT.test.js b/clouds/bigquery/modules/test/h3/H3_FROMGEOGPOINT.test.js index fe75d3e43..f8ce4d8aa 100644 --- a/clouds/bigquery/modules/test/h3/H3_FROMGEOGPOINT.test.js +++ b/clouds/bigquery/modules/test/h3/H3_FROMGEOGPOINT.test.js @@ -50,4 +50,35 @@ test('H3_FROMGEOGPOINT returns NULL with non POINT geographies', async () => { null, null ]); +}); + +test('H3_FROMGEOPOINT returns the proper INT64', async () => { + const query = ` + WITH inputs AS + ( + SELECT 1 AS id, ST_GEOGPOINT(-122.0553238, 37.3615593) as geom, 5 as resolution UNION ALL + SELECT 2 AS id, ST_GEOGPOINT(-164.991559, 30.943387) as geom, 5 as resolution UNION ALL + SELECT 3 AS id, ST_GEOGPOINT(71.52790329909925, 46.04189431883772) as geom, 15 as resolution UNION ALL + + -- null inputs + SELECT 4 AS id, NULL AS geom, 5 as resolution UNION ALL + SELECT 5 AS id, ST_GEOGPOINT(-122.0553238, 37.3615593) as geom, -1 as resolution UNION ALL + SELECT 6 AS id, ST_GEOGPOINT(-122.0553238, 37.3615593) as geom, 20 as resolution UNION ALL + SELECT 7 AS id, ST_GEOGPOINT(-122.0553238, 37.3615593) as geom, NULL as resolution + ) + SELECT CAST(\`@@BQ_DATASET@@.H3_FROMGEOPOINT\`(geom, resolution) AS STRING) as h3_id + FROM inputs + ORDER BY id ASC + `; + const rows = await runQuery(query); + expect(rows.length).toEqual(7); + expect(rows.map((r) => r.h3_id)).toEqual([ + '85283473fffffff', + '8547732ffffffff', + '8f2000000000000', + null, + null, + null, + null + ]); }); \ No newline at end of file diff --git a/clouds/bigquery/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js b/clouds/bigquery/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js index 51f2a03dd..93b094856 100644 --- a/clouds/bigquery/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js +++ 
b/clouds/bigquery/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js @@ -5,4 +5,11 @@ test('QUADBIN_FROMGEOGPOINT should work', async () => { const rows = await runQuery(query); expect(rows.length).toEqual(1); expect(rows[0].output).toEqual('5209574053332910079'); +}); + +test('QUADBIN_FROMGEOPOINT should work', async () => { + const query = 'SELECT CAST(`@@BQ_DATASET@@.QUADBIN_FROMGEOPOINT`(ST_GEOGPOINT(40.4168, -3.7038), 4) AS STRING) AS output'; + const rows = await runQuery(query); + expect(rows.length).toEqual(1); + expect(rows[0].output).toEqual('5209574053332910079'); }); \ No newline at end of file diff --git a/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md b/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md index acd6c6a4f..c5af10567 100644 --- a/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md +++ b/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ H3_FROMGEOGPOINT(point, resolution) **Description** -Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). +Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. * `point`: `GEOMETRY` point to get the H3 cell from. * `resolution`: `INT` number between 0 and 15 with the [H3 resolution](https://h3geo.org/docs/core-library/restable). diff --git a/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index 18a095d29..b0cf5f192 100644 --- a/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. +Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOMETRY` point to get the Quadbin from. * `resolution`: `BIGINT` level of detail or zoom. 
diff --git a/clouds/postgres/modules/sql/h3/H3_FROMGEOGPOINT.sql b/clouds/postgres/modules/sql/h3/H3_FROMGEOGPOINT.sql index 4c66d3c4c..664818065 100644 --- a/clouds/postgres/modules/sql/h3/H3_FROMGEOGPOINT.sql +++ b/clouds/postgres/modules/sql/h3/H3_FROMGEOGPOINT.sql @@ -1,6 +1,6 @@ ----------------------------- --- Copyright (C) 2023 CARTO ----------------------------- +-------------------------------- +-- Copyright (C) 2023-2024 CARTO +-------------------------------- CREATE OR REPLACE FUNCTION @@PG_SCHEMA@@.H3_FROMGEOGPOINT( geog GEOMETRY, @@ -17,3 +17,14 @@ $BODY$ END $BODY$ LANGUAGE sql IMMUTABLE PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION @@PG_SCHEMA@@.H3_FROMGEOPOINT( + geo GEOMETRY, + resolution INT +) +RETURNS VARCHAR(16) +AS +$BODY$ + SELECT @@PG_SCHEMA@@.H3_FROMGEOGPOINT(geo, resolution) +$BODY$ +LANGUAGE sql IMMUTABLE PARALLEL SAFE; diff --git a/clouds/postgres/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql b/clouds/postgres/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql index ff3a1ea6c..140020bb5 100644 --- a/clouds/postgres/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql +++ b/clouds/postgres/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql @@ -1,6 +1,6 @@ ----------------------------- --- Copyright (C) 2022 CARTO ----------------------------- +-------------------------------- +-- Copyright (C) 2022-2024 CARTO +-------------------------------- CREATE OR REPLACE FUNCTION @@PG_SCHEMA@@.QUADBIN_FROMGEOGPOINT( point GEOMETRY, @@ -20,3 +20,14 @@ $BODY$ FROM __geom4326; $BODY$ LANGUAGE sql IMMUTABLE PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION @@PG_SCHEMA@@.QUADBIN_FROMGEOPOINT( + point GEOMETRY, + resolution INT +) +RETURNS BIGINT +AS +$BODY$ + SELECT @@PG_SCHEMA@@.QUADBIN_FROMGEOGPOINT(point, resolution) +$BODY$ +LANGUAGE sql IMMUTABLE PARALLEL SAFE; diff --git a/clouds/postgres/modules/test/h3/test_H3_FROMGEOGPOINT.py b/clouds/postgres/modules/test/h3/test_H3_FROMGEOGPOINT.py index b75073ead..a829113ef 100644 --- a/clouds/postgres/modules/test/h3/test_H3_FROMGEOGPOINT.py +++ b/clouds/postgres/modules/test/h3/test_H3_FROMGEOGPOINT.py @@ -51,3 +51,34 @@ def test_h3_fromgeogpoint_non_points(): assert result[0][0] is None assert result[1][0] is None assert result[2][0] is None + + +def test_h3_fromgeopoint(): + """Returns the proper index.""" + result = run_query( + """ + WITH inputs AS + ( + SELECT 1 AS id, ST_POINT(-122.0553238, 37.3615593) as geom, 5 as resolution UNION ALL + SELECT 2 AS id, ST_POINT(-164.991559, 30.943387) as geom, 5 as resolution UNION ALL + SELECT 3 AS id, ST_POINT(71.52790329909925, 46.04189431883772) as geom, 15 as resolution UNION ALL + + -- null inputs + SELECT 4 AS id, NULL AS geom, 5 as resolution UNION ALL + SELECT 5 AS id, ST_POINT(-122.0553238, 37.3615593) as geom, -1 as resolution UNION ALL + SELECT 6 AS id, ST_POINT(-122.0553238, 37.3615593) as geom, 20 as resolution UNION ALL + SELECT 7 AS id, ST_POINT(-122.0553238, 37.3615593) as geom, NULL as resolution + ) + SELECT @@PG_SCHEMA@@.H3_FROMGEOPOINT(geom, resolution) as h3_id + FROM inputs + ORDER BY id ASC + """ # noqa + ) + assert len(result) == 7 + assert result[0][0] == '85283473fffffff' + assert result[1][0] == '8547732ffffffff' + assert result[2][0] == '8f2000000000000' + assert result[3][0] is None + assert result[4][0] is None + assert result[5][0] is None + assert result[6][0] is None diff --git a/clouds/postgres/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py b/clouds/postgres/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py index 0589405c2..525d0387e 100644 --- 
a/clouds/postgres/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py +++ b/clouds/postgres/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py @@ -29,3 +29,11 @@ def test_quadbin_fromgeogpoint_other_srid(): """ ) assert result[0][0] == 5209574053332910079 + + +def test_quadbin_fromgeopoint_no_srid(): + """Computes quadbin for point with no SRID.""" + result = run_query( + 'SELECT @@PG_SCHEMA@@.QUADBIN_FROMGEOPOINT(ST_MAKEPOINT(40.4168, -3.7038), 4)' + ) + assert result[0][0] == 5209574053332910079 diff --git a/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index 344f309d1..db16ae0cf 100644 --- a/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. +Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOMETRY` point to get the Quadbin from. * `resolution`: `INT` level of detail or zoom. diff --git a/clouds/redshift/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql b/clouds/redshift/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql index b936d4b8e..1c208255e 100644 --- a/clouds/redshift/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql +++ b/clouds/redshift/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql @@ -1,6 +1,6 @@ ----------------------------- --- Copyright (C) 2022 CARTO ----------------------------- +-------------------------------- +-- Copyright (C) 2022-2024 CARTO +-------------------------------- CREATE OR REPLACE FUNCTION @@RS_SCHEMA@@.QUADBIN_FROMGEOGPOINT (GEOMETRY, INT) @@ -13,3 +13,12 @@ AS $$ ELSE @@RS_SCHEMA@@.QUADBIN_FROMLONGLAT(ST_X(ST_TRANSFORM($1, 4326)), ST_Y(ST_TRANSFORM($1, 4326)), $2) END $$ LANGUAGE sql; + +CREATE OR REPLACE FUNCTION @@RS_SCHEMA@@.QUADBIN_FROMGEOPOINT +(GEOMETRY, INT) +-- (point, resolution) +RETURNS BIGINT +STABLE +AS $$ + SELECT @@RS_SCHEMA@@.QUADBIN_FROMGEOGPOINT($1, $2) +$$ LANGUAGE sql; diff --git a/clouds/redshift/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py b/clouds/redshift/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py index 0c16b71f2..eccd396a7 100644 --- a/clouds/redshift/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py +++ b/clouds/redshift/modules/test/quadbin/test_QUADBIN_FROMGEOGPOINT.py @@ -8,3 +8,12 @@ def test_quadbin_fromgeogpoint(): assert len(result[0]) == 1 assert result[0][0] == 5209574053332910079 + + +def test_quadbin_fromgeopoint(): + result = run_query( + 'SELECT @@RS_SCHEMA@@.QUADBIN_FROMGEOPOINT(ST_POINT(40.4168, -3.7038),4)' + ) + + assert len(result[0]) == 1 + assert result[0][0] == 5209574053332910079 \ No newline at end of file diff --git a/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md b/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md index 73ac27d77..f6c5f1686 100644 --- a/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md +++ b/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ H3_FROMGEOGPOINT(point, resolution) **Description** -Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). +Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. 
* `point`: `GEOGRAPHY` point to get the H3 cell from. * `resolution`: `INT` number between 0 and 15 with the [H3 resolution](https://h3geo.org/docs/core-library/restable). diff --git a/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index 55add4864..ea240aecb 100644 --- a/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. +Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOGRAPHY` point to get the Quadbin from. * `resolution`: `INT` level of detail or zoom. diff --git a/clouds/snowflake/modules/sql/h3/H3_FROMGEOGPOINT.sql b/clouds/snowflake/modules/sql/h3/H3_FROMGEOGPOINT.sql index 069551dd8..49a2bb512 100644 --- a/clouds/snowflake/modules/sql/h3/H3_FROMGEOGPOINT.sql +++ b/clouds/snowflake/modules/sql/h3/H3_FROMGEOGPOINT.sql @@ -11,3 +11,11 @@ AS $$ H3_POINT_TO_CELL_STRING(GEOG, RESOLUTION), NULL) $$; + +CREATE OR REPLACE SECURE FUNCTION @@SF_SCHEMA@@.H3_FROMGEOPOINT +(geo GEOGRAPHY, resolution INT) +RETURNS STRING +IMMUTABLE +AS $$ + @@SF_SCHEMA@@.H3_FROMGEOGPOINT(geo, resolution) +$$; diff --git a/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql b/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql index b130ee2a6..4b6efa3c3 100644 --- a/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql +++ b/clouds/snowflake/modules/sql/quadbin/QUADBIN_FROMGEOGPOINT.sql @@ -1,6 +1,6 @@ ----------------------------- --- Copyright (C) 2022 CARTO ----------------------------- +-------------------------------- +-- Copyright (C) 2022-2024 CARTO +-------------------------------- CREATE OR REPLACE SECURE FUNCTION @@SF_SCHEMA@@.QUADBIN_FROMGEOGPOINT (point GEOGRAPHY, resolution INT) @@ -17,3 +17,11 @@ IMMUTABLE AS $$ @@SF_SCHEMA@@._QUADBIN_FROMLONGLAT(ST_X(point), ST_Y(point), resolution) $$; + +CREATE OR REPLACE SECURE FUNCTION @@SF_SCHEMA@@.QUADBIN_FROMGEOPOINT +(point GEOGRAPHY, resolution INT) +RETURNS BIGINT +IMMUTABLE +AS $$ + @@SF_SCHEMA@@._QUADBIN_FROMGEOGPOINT(point, resolution) +$$; diff --git a/clouds/snowflake/modules/test/h3/H3_FROMGEOGPOINT.spec.js b/clouds/snowflake/modules/test/h3/H3_FROMGEOGPOINT.test.js similarity index 62% rename from clouds/snowflake/modules/test/h3/H3_FROMGEOGPOINT.spec.js rename to clouds/snowflake/modules/test/h3/H3_FROMGEOGPOINT.test.js index 1864f6f9a..a7f87c809 100644 --- a/clouds/snowflake/modules/test/h3/H3_FROMGEOGPOINT.spec.js +++ b/clouds/snowflake/modules/test/h3/H3_FROMGEOGPOINT.test.js @@ -53,4 +53,37 @@ test('H3_FROMGEOGPOINT returns NULL with non POINT geographies', async () => { null, null ]); +}); + +test('H3_FROMGEOPOINT returns the proper INT64', async () => { + const query = ` + WITH inputs AS + ( + SELECT 1 AS id, ST_POINT(-122.0553238, 37.3615593) as geom, 5 as resolution UNION ALL + SELECT 2 AS id, ST_POINT(-164.991559, 30.943387) as geom, 5 as resolution UNION ALL + SELECT 3 AS id, ST_POINT(71.52790329909925, 46.04189431883772) as geom, 15 as resolution UNION ALL + + -- null inputs + SELECT 4 AS id, TRY_TO_GEOGRAPHY(NULL) AS geom, 5 as resolution UNION ALL + SELECT 5 AS id, ST_POINT(-122.0553238, 37.3615593) as geom, -1 as resolution UNION ALL + SELECT 6 AS id, ST_POINT(-122.0553238, 37.3615593) as geom, 20 as resolution UNION ALL + SELECT 7 
AS id, ST_POINT(-122.0553238, 37.3615593) as geom, NULL as resolution + ) + SELECT + CAST(H3_FROMGEOPOINT(geom, resolution) AS STRING) as h3_id + FROM inputs + ORDER BY id ASC + `; + + const rows = await runQuery(query); + expect(rows.length).toEqual(7); + expect(rows.map((r) => r.H3_ID)).toEqual([ + '85283473fffffff', + '8547732ffffffff', + '8f2000000000000', + null, + null, + null, + null + ]); }); \ No newline at end of file diff --git a/clouds/snowflake/modules/test/h3/H3_FROMLONGLAT.spec.js b/clouds/snowflake/modules/test/h3/H3_FROMLONGLAT.test.js similarity index 100% rename from clouds/snowflake/modules/test/h3/H3_FROMLONGLAT.spec.js rename to clouds/snowflake/modules/test/h3/H3_FROMLONGLAT.test.js diff --git a/clouds/snowflake/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js b/clouds/snowflake/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js index dc65b4adb..b90e2efd2 100644 --- a/clouds/snowflake/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js +++ b/clouds/snowflake/modules/test/quadbin/QUADBIN_FROMGEOGPOINT.test.js @@ -5,4 +5,11 @@ test('QUADBIN_FROMGEOGPOINT should work', async () => { const rows = await runQuery(query); expect(rows.length).toEqual(1); expect(rows[0].OUTPUT).toEqual('5209574053332910079'); +}); + +test('QUADBIN_FROMGEOPOINT should work', async () => { + const query = 'SELECT CAST(QUADBIN_FROMGEOPOINT(ST_POINT(40.4168, -3.7038), 4) AS STRING) AS OUTPUT'; + const rows = await runQuery(query); + expect(rows.length).toEqual(1); + expect(rows[0].OUTPUT).toEqual('5209574053332910079'); }); \ No newline at end of file From a72b208f0e2d286f204a2d2749a48cfb222a2b91 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 11:56:42 +0100 Subject: [PATCH 13/25] Merge release into main branch (#534) --- CHANGELOG.md | 5 +++++ clouds/bigquery/CHANGELOG.md | 5 +++++ clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md | 2 +- clouds/bigquery/modules/doc/h3/H3_POLYFILL.md | 2 +- clouds/bigquery/modules/doc/h3/H3_POLYFILL_MODE.md | 2 +- clouds/bigquery/modules/doc/h3/H3_POLYFILL_TABLE.md | 2 +- clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md | 2 +- clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL.md | 2 +- clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_MODE.md | 2 +- .../bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_TABLE.md | 2 +- clouds/bigquery/modules/doc/s2/S2_FROMGEOGPOINT.md | 2 +- clouds/bigquery/version | 2 +- clouds/postgres/CHANGELOG.md | 4 ++++ clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md | 2 +- clouds/postgres/modules/doc/h3/H3_POLYFILL.md | 2 +- clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- clouds/postgres/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md | 2 +- clouds/postgres/modules/doc/quadbin/QUADBIN_POLYFILL.md | 2 +- clouds/postgres/version | 2 +- clouds/redshift/CHANGELOG.md | 4 ++++ clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- clouds/redshift/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md | 2 +- clouds/redshift/modules/doc/quadbin/QUADBIN_POLYFILL.md | 2 +- clouds/redshift/modules/doc/s2/S2_FROMGEOGPOINT.md | 2 +- clouds/redshift/modules/doc/s2/S2_FROMLONGLAT.md | 2 +- clouds/redshift/version | 2 +- clouds/snowflake/CHANGELOG.md | 4 ++++ clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md | 2 +- clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md | 2 +- .../snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md | 2 +- 
clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md | 2 +- clouds/snowflake/modules/doc/quadbin/QUADBIN_POLYFILL.md | 2 +- clouds/snowflake/modules/doc/s2/S2_FROMGEOGPOINT.md | 2 +- clouds/snowflake/version | 2 +- 35 files changed, 52 insertions(+), 30 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8000d888a..70c97ee15 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,11 @@ CARTO Analytics Toolbox Core. All notable commits to this project will be documented in this file. +## 2024-10-28 + +- chore(bq): fix @google-cloud/bigquery to version 7.9.0 (#531) +- chore(bq,sf,rs,pg|h3,quadbin): added "geo" aliases for certain functions (#526) + ## 2024-09-23 - feat(sf): added warehouse option for SF (#524) diff --git a/clouds/bigquery/CHANGELOG.md b/clouds/bigquery/CHANGELOG.md index d25171df8..d82bac8e9 100644 --- a/clouds/bigquery/CHANGELOG.md +++ b/clouds/bigquery/CHANGELOG.md @@ -4,6 +4,11 @@ CARTO Analytics Toolbox Core for BigQuery. All notable commits to this project will be documented in this file. +## [1.2.4] - 2024-10-28 + +- chore: fix @google-cloud/bigquery to version 7.9.0 (#531) +- chore(h3,quadbin): added "geo" aliases for certain functions (#526) + ## [1.2.3] - 2024-06-27 - chore(quadbin): optimize polyfill (#513) diff --git a/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md b/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md index 58d018d52..5f92aaee1 100644 --- a/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md +++ b/clouds/bigquery/modules/doc/h3/H3_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ H3_FROMGEOGPOINT(point, resolution) **Description** -Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. +Returns the H3 cell index that the point belongs to in the requested `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. * `point`: `GEOGRAPHY` point to get the H3 cell from. * `resolution`: `INT64` number between 0 and 15 with the [H3 resolution](https://h3geo.org/docs/core-library/restable). diff --git a/clouds/bigquery/modules/doc/h3/H3_POLYFILL.md b/clouds/bigquery/modules/doc/h3/H3_POLYFILL.md index 72067fd74..7ff458703 100644 --- a/clouds/bigquery/modules/doc/h3/H3_POLYFILL.md +++ b/clouds/bigquery/modules/doc/h3/H3_POLYFILL.md @@ -6,7 +6,7 @@ H3_POLYFILL(geog, resolution) **Description** -Returns an array of H3 cell indexes contained in the given geography (Polygon, MultiPolygon) at a given level of detail. Containment is determined by the cells' center. This function is equivalent to [`H3_POLYFILL_MODE`](h3#h3_polyfill_mode) with mode `center`. +Returns an array of H3 cell indexes contained in the given geography (Polygon, MultiPolygon) at a requested resolution. Containment is determined by the cells' center. This function is equivalent to [`H3_POLYFILL_MODE`](h3#h3_polyfill_mode) with mode `center`. * `geog`: `GEOGRAPHY` representing the shape to cover. * `resolution`: `INT64` level of detail. The value must be between 0 and 15 ([H3 resolution table](https://h3geo.org/docs/core-library/restable)). 
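Editor's note: since several of the reworded docs above describe `H3_POLYFILL`, a short usage sketch may help; it is not part of the patch. It assumes the dataset name `carto` for the `@@BQ_DATASET@@` placeholder and uses the default center-based containment described in the doc.

```sql
-- Covers a small polygon with H3 cells at resolution 9 (center containment).
-- Dataset name `carto` is assumed for the @@BQ_DATASET@@ placeholder.
SELECT `carto.H3_POLYFILL`(
  ST_GEOGFROMTEXT('POLYGON((-3.71 40.41, -3.70 40.41, -3.70 40.42, -3.71 40.42, -3.71 40.41))'),
  9
) AS h3_cells;
```
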
diff --git a/clouds/bigquery/modules/doc/h3/H3_POLYFILL_MODE.md b/clouds/bigquery/modules/doc/h3/H3_POLYFILL_MODE.md index da41d4e68..00f2f3891 100644 --- a/clouds/bigquery/modules/doc/h3/H3_POLYFILL_MODE.md +++ b/clouds/bigquery/modules/doc/h3/H3_POLYFILL_MODE.md @@ -6,7 +6,7 @@ H3_POLYFILL_MODE(geog, resolution, mode) **Description** -Returns an array of H3 cell indexes contained in the given geography at a given level of detail. Containment is determined by the mode: center, intersects, contains. +Returns an array of H3 cell indexes contained in the given geography at a requested resolution. Containment is determined by the mode: center, intersects, contains. * `geog`: `GEOGRAPHY` representing the shape to cover. * `resolution`: `INT64` level of detail. The value must be between 0 and 15 ([H3 resolution table](https://h3geo.org/docs/core-library/restable)). diff --git a/clouds/bigquery/modules/doc/h3/H3_POLYFILL_TABLE.md b/clouds/bigquery/modules/doc/h3/H3_POLYFILL_TABLE.md index 885c23b72..5491aabb9 100644 --- a/clouds/bigquery/modules/doc/h3/H3_POLYFILL_TABLE.md +++ b/clouds/bigquery/modules/doc/h3/H3_POLYFILL_TABLE.md @@ -6,7 +6,7 @@ H3_POLYFILL_TABLE(input_query, resolution, mode, output_table) **Description** -Returns a table with the H3 cell indexes contained in the given geography at a given level of detail. Containment is determined by the mode: center, intersects, contains. All the attributes except the geography will be included in the output table, clustered by the h3 column. +Returns a table with the H3 cell indexes contained in the given geography at a requested resolution. Containment is determined by the mode: center, intersects, contains. All the attributes except the geography will be included in the output table, clustered by the h3 column. * `input_query`: `STRING` input data to polyfill. It must contain a column `geom` with the shape to cover. Additionally, other columns can be included. * `resolution`: `INT64` level of detail. The value must be between 0 and 15 ([H3 resolution table](https://h3geo.org/docs/core-library/restable)). diff --git a/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index 0f8496e6a..a72b28323 100644 --- a/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. +Returns the Quadbin of a given point at a requested resolution. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOGRAPHY` point to get the Quadbin from. * `resolution`: `INT64` level of detail or zoom. diff --git a/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md b/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md index e992b2bd1..4cddb420c 100644 --- a/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md +++ b/clouds/bigquery/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md @@ -6,7 +6,7 @@ QUADBIN_FROMLONGLAT(longitude, latitude, resolution) **Description** -Returns the Quadbin representation of a point for a given level of detail and geographic coordinates. +Returns the Quadbin representation of a point for a requested resolution and geographic coordinates. * `longitude`: `FLOAT64` longitude (WGS84) of the point. * `latitude`: `FLOAT64` latitude (WGS84) of the point. 
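Editor's note: to make the "requested resolution" wording concrete, here is a hedged BigQuery sketch (again assuming dataset `carto` for `@@BQ_DATASET@@`): the same coordinates yield different Quadbin cells as the resolution argument changes.

```sql
-- The resolution argument selects the zoom level of the returned Quadbin cell.
-- Dataset name `carto` is assumed for the @@BQ_DATASET@@ placeholder.
SELECT
  `carto.QUADBIN_FROMLONGLAT`(-3.7038, 40.4168, 4)  AS quadbin_z4,
  `carto.QUADBIN_FROMLONGLAT`(-3.7038, 40.4168, 12) AS quadbin_z12;
```
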
diff --git a/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL.md b/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL.md index 86dbb8407..4abbf4126 100644 --- a/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL.md +++ b/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL.md @@ -6,7 +6,7 @@ QUADBIN_POLYFILL(geog, resolution) **Description** -Returns an array of quadbin cell indexes contained in the given geography (Polygon, MultiPolygon) at a given level of detail. Containment is determined by the cells' center. This function is equivalent to [`QUADBIN_POLYFILL_MODE`](quadbin#quadbin_polyfill_mode) with mode `center`. +Returns an array of quadbin cell indexes contained in the given geography (Polygon, MultiPolygon) at a requested resolution. Containment is determined by the cells' center. This function is equivalent to [`QUADBIN_POLYFILL_MODE`](quadbin#quadbin_polyfill_mode) with mode `center`. * `geog`: `GEOGRAPHY` representing the shape to cover. * `resolution`: `INT64` level of detail. The value must be between 0 and 26. diff --git a/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_MODE.md b/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_MODE.md index a88990d71..80e10d6c4 100644 --- a/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_MODE.md +++ b/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_MODE.md @@ -6,7 +6,7 @@ QUADBIN_POLYFILL_MODE(geog, resolution, mode) **Description** -Returns an array of quadbin cell indexes contained in the given geography at a given level of detail. Containment is determined by the mode: center, intersects, contains. +Returns an array of quadbin cell indexes contained in the given geography at a requested resolution. Containment is determined by the mode: center, intersects, contains. * `geog`: `GEOGRAPHY` representing the shape to cover. * `resolution`: `INT64` level of detail. The value must be between 0 and 26. diff --git a/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_TABLE.md b/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_TABLE.md index f3f59728a..ccc695491 100644 --- a/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_TABLE.md +++ b/clouds/bigquery/modules/doc/quadbin/QUADBIN_POLYFILL_TABLE.md @@ -6,7 +6,7 @@ QUADBIN_POLYFILL_TABLE(input_query, resolution, mode, output_table) **Description** -Returns a table with the quadbin cell indexes contained in the given geography at a given level of detail. Containment is determined by the mode: center, intersects, contains. All the attributes except the geography will be included in the output table, clustered by the quadbin column. +Returns a table with the quadbin cell indexes contained in the given geography at a requested resolution. Containment is determined by the mode: center, intersects, contains. All the attributes except the geography will be included in the output table, clustered by the quadbin column. * `input_query`: `STRING` input data to polyfill. It must contain a column `geom` with the shape to cover. Additionally, other columns can be included. * `resolution`: `INT64` level of detail. The value must be between 0 and 26. diff --git a/clouds/bigquery/modules/doc/s2/S2_FROMGEOGPOINT.md b/clouds/bigquery/modules/doc/s2/S2_FROMGEOGPOINT.md index 9e4899640..f7e40ce5a 100644 --- a/clouds/bigquery/modules/doc/s2/S2_FROMGEOGPOINT.md +++ b/clouds/bigquery/modules/doc/s2/S2_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ S2_FROMGEOGPOINT(point, resolution) **Description** -Returns the S2 cell ID of a given point at a given level of detail. 
+Returns the S2 cell ID of a given point at a requested resolution. * `point`: `GEOGRAPHY` point to get the ID from. * `resolution`: `INT64` level of detail or zoom. diff --git a/clouds/bigquery/version b/clouds/bigquery/version index 0495c4a88..e8ea05db8 100644 --- a/clouds/bigquery/version +++ b/clouds/bigquery/version @@ -1 +1 @@ -1.2.3 +1.2.4 diff --git a/clouds/postgres/CHANGELOG.md b/clouds/postgres/CHANGELOG.md index c79ebd098..c7f3a0185 100644 --- a/clouds/postgres/CHANGELOG.md +++ b/clouds/postgres/CHANGELOG.md @@ -4,6 +4,10 @@ CARTO Analytics Toolbox Core for Postgres. All notable commits to this project will be documented in this file. +## [1.3.1] - 2024-10-28 + +- chore(h3,quadbin): added "geo" aliases for certain functions (#526) + ## [1.3.0] - 2024-01-17 - feat(quadbin): add function QUADBIN_DISTANCE (#457) diff --git a/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md b/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md index c5af10567..3972eebef 100644 --- a/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md +++ b/clouds/postgres/modules/doc/h3/H3_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ H3_FROMGEOGPOINT(point, resolution) **Description** -Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. +Returns the H3 cell index that the point belongs to in the requested `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. * `point`: `GEOMETRY` point to get the H3 cell from. * `resolution`: `INT` number between 0 and 15 with the [H3 resolution](https://h3geo.org/docs/core-library/restable). diff --git a/clouds/postgres/modules/doc/h3/H3_POLYFILL.md b/clouds/postgres/modules/doc/h3/H3_POLYFILL.md index f0d82c1a3..76b71f69f 100644 --- a/clouds/postgres/modules/doc/h3/H3_POLYFILL.md +++ b/clouds/postgres/modules/doc/h3/H3_POLYFILL.md @@ -6,7 +6,7 @@ H3_POLYFILL(geom, resolution [, mode]) **Description** -Returns an array of H3 cell indexes contained in the given geometry at a given level of detail. Containment is determined by the mode: center, intersects, contains. +Returns an array of H3 cell indexes contained in the given geometry at a requested resolution. Containment is determined by the mode: center, intersects, contains. * `geom`: `GEOMETRY` representing the shape to cover. * `resolution`: `INT` level of detail. The value must be between 0 and 15 ([H3 resolution table](https://h3geo.org/docs/core-library/restable)). diff --git a/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index b0cf5f192..d8d339541 100644 --- a/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. +Returns the Quadbin of a given point at a requested resolution. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOMETRY` point to get the Quadbin from. * `resolution`: `BIGINT` level of detail or zoom. 
diff --git a/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md b/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md index 459e6209a..c775c5e42 100644 --- a/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md +++ b/clouds/postgres/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md @@ -6,7 +6,7 @@ QUADBIN_FROMLONGLAT(longitude, latitude, resolution) **Description** -Returns the Quadbin representation of a point for a given level of detail and geographic coordinates. +Returns the Quadbin representation of a point for a requested resolution and geographic coordinates. * `longitude`: `DOUBLE PRECISION` longitude (WGS84) of the point. * `latitude`: `DOUBLE PRECISION` latitude (WGS84) of the point. diff --git a/clouds/postgres/modules/doc/quadbin/QUADBIN_POLYFILL.md b/clouds/postgres/modules/doc/quadbin/QUADBIN_POLYFILL.md index 5634f4200..a6ffed008 100644 --- a/clouds/postgres/modules/doc/quadbin/QUADBIN_POLYFILL.md +++ b/clouds/postgres/modules/doc/quadbin/QUADBIN_POLYFILL.md @@ -6,7 +6,7 @@ QUADBIN_POLYFILL(geom, resolution [, mode]) **Description** -Returns an array of quadbin cell indexes contained in the given geometry at a given level of detail. Containment is determined by the mode: center, intersects, contains. +Returns an array of quadbin cell indexes contained in the given geometry at a requested resolution. Containment is determined by the mode: center, intersects, contains. * `geom`: `GEOMETRY` representing the shape to cover. * `resolution`: `INT` level of detail. The value must be between 0 and 26. diff --git a/clouds/postgres/version b/clouds/postgres/version index f0bb29e76..3a3cd8cc8 100644 --- a/clouds/postgres/version +++ b/clouds/postgres/version @@ -1 +1 @@ -1.3.0 +1.3.1 diff --git a/clouds/redshift/CHANGELOG.md b/clouds/redshift/CHANGELOG.md index 2bb8d7e69..86dd8ef25 100644 --- a/clouds/redshift/CHANGELOG.md +++ b/clouds/redshift/CHANGELOG.md @@ -4,6 +4,10 @@ CARTO Analytics Toolbox Core for Redshift. All notable commits to this project will be documented in this file. +## [1.1.2] - 2024-10-28 + +- chore(h3,quadbin): added "geo" aliases for certain functions (#526) + ## [1.1.1] - 2024-04-18 - docs(processing): update voronoi doc (#492) diff --git a/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index db16ae0cf..9bceebc8f 100644 --- a/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. +Returns the Quadbin of a given point at a requested resolution. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOMETRY` point to get the Quadbin from. * `resolution`: `INT` level of detail or zoom. diff --git a/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md b/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md index 64988427d..ee210792f 100644 --- a/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md +++ b/clouds/redshift/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md @@ -6,7 +6,7 @@ QUADBIN_FROMLONGLAT(longitude, latitude, resolution) **Description** -Returns the Quadbin representation of a point for a given level of detail and geographic coordinates. +Returns the Quadbin representation of a point for a requested resolution and geographic coordinates. 
* `longitude`: `FLOAT8` longitude (WGS84) of the point. * `latitude`: `FLOAT8` latitude (WGS84) of the point. diff --git a/clouds/redshift/modules/doc/quadbin/QUADBIN_POLYFILL.md b/clouds/redshift/modules/doc/quadbin/QUADBIN_POLYFILL.md index b639558d1..4a003d066 100644 --- a/clouds/redshift/modules/doc/quadbin/QUADBIN_POLYFILL.md +++ b/clouds/redshift/modules/doc/quadbin/QUADBIN_POLYFILL.md @@ -6,7 +6,7 @@ QUADBIN_POLYFILL(geography, resolution) **Description** -Returns an array of Quadbins that intersect with the given geography at a given level of detail. +Returns an array of Quadbins that intersect with the given geography at a requested resolution. * `geography`: `GEOMETRY` geography to extract the Quadbins from. * `resolution`: `INT` level of detail or zoom. diff --git a/clouds/redshift/modules/doc/s2/S2_FROMGEOGPOINT.md b/clouds/redshift/modules/doc/s2/S2_FROMGEOGPOINT.md index a62c6f5fb..b541d8084 100644 --- a/clouds/redshift/modules/doc/s2/S2_FROMGEOGPOINT.md +++ b/clouds/redshift/modules/doc/s2/S2_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ S2_FROMGEOGPOINT(point, resolution) **Description** -Returns the S2 cell ID of a given point at a given level of detail. +Returns the S2 cell ID of a given point at a requested resolution. * `point`: `GEOGRAPHY` vertical coordinate of the map. * `resolution`: `INT4` level of detail or zoom. diff --git a/clouds/redshift/modules/doc/s2/S2_FROMLONGLAT.md b/clouds/redshift/modules/doc/s2/S2_FROMLONGLAT.md index 6141521a0..958e53b53 100644 --- a/clouds/redshift/modules/doc/s2/S2_FROMLONGLAT.md +++ b/clouds/redshift/modules/doc/s2/S2_FROMLONGLAT.md @@ -6,7 +6,7 @@ S2_FROMLONGLAT(longitude, latitude, resolution) **Description** -Returns the S2 cell ID representation for a given level of detail and geographic coordinates. +Returns the S2 cell ID representation for a requested resolution and geographic coordinates. * `longitude`: `FLOAT8` horizontal coordinate of the map. * `latitude`: `FLOAT8` vertical coordinate of the map. diff --git a/clouds/redshift/version b/clouds/redshift/version index 524cb5524..45a1b3f44 100644 --- a/clouds/redshift/version +++ b/clouds/redshift/version @@ -1 +1 @@ -1.1.1 +1.1.2 diff --git a/clouds/snowflake/CHANGELOG.md b/clouds/snowflake/CHANGELOG.md index cd3735e94..f0e703b58 100644 --- a/clouds/snowflake/CHANGELOG.md +++ b/clouds/snowflake/CHANGELOG.md @@ -4,6 +4,10 @@ CARTO Analytics Toolbox Core for Snowflake. All notable commits to this project will be documented in this file. +## [1.2.6] - 2024-10-28 + +- chore(h3,quadbin): added "geo" aliases for certain functions (#526) + ## [1.2.5] - 2024-09-23 - feat: added warehouse option for SF (#524) diff --git a/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md b/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md index f6c5f1686..c4cf090db 100644 --- a/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md +++ b/clouds/snowflake/modules/doc/h3/H3_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ H3_FROMGEOGPOINT(point, resolution) **Description** -Returns the H3 cell index that the point belongs to in the required `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. +Returns the H3 cell index that the point belongs to in the requested `resolution`. It will return `null` on error (invalid geography type or resolution out of bounds). This function is an alias for `H3_FROMGEOPOINT`. * `point`: `GEOGRAPHY` point to get the H3 cell from. 
* `resolution`: `INT` number between 0 and 15 with the [H3 resolution](https://h3geo.org/docs/core-library/restable). diff --git a/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md b/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md index 4378802bf..60009de19 100644 --- a/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md +++ b/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md @@ -6,7 +6,7 @@ H3_POLYFILL_TABLE(input_query, resolution, mode, output_table) **Description** -Returns a table with the H3 cell indexes contained in the given geography at a given level of detail. Containment is determined by the mode: center, intersects, contains. All the attributes except the geography will be included in the output table, clustered by the h3 column. +Returns a table with the H3 cell indexes contained in the given geography at a requested resolution. Containment is determined by the mode: center, intersects, contains. All the attributes except the geography will be included in the output table, clustered by the h3 column. * `input_query`: `STRING` input data to polyfill. It must contain a column `geom` with the shape to cover. Additionally, other columns can be included. * `resolution`: `INT` level of detail. The value must be between 0 and 15 ([H3 resolution table](https://h3geo.org/docs/core-library/restable)). diff --git a/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md b/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md index ea240aecb..ef1c01cc2 100644 --- a/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md +++ b/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ QUADBIN_FROMGEOGPOINT(point, resolution) **Description** -Returns the Quadbin of a given point at a given level of detail. This function is an alias for `QUADBIN_FROMGEOPOINT`. +Returns the Quadbin of a given point at a requested resolution. This function is an alias for `QUADBIN_FROMGEOPOINT`. * `point`: `GEOGRAPHY` point to get the Quadbin from. * `resolution`: `INT` level of detail or zoom. diff --git a/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md b/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md index d6e28cf3c..b57334cb3 100644 --- a/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md +++ b/clouds/snowflake/modules/doc/quadbin/QUADBIN_FROMLONGLAT.md @@ -6,7 +6,7 @@ QUADBIN_FROMLONGLAT(longitude, latitude, resolution) **Description** -Returns the Quadbin representation of a point for a given level of detail and geographic coordinates. +Returns the Quadbin representation of a point for a requested resolution and geographic coordinates. * `longitude`: `FLOAT64` longitude (WGS84) of the point. * `latitude`: `FLOAT64` latitude (WGS84) of the point. diff --git a/clouds/snowflake/modules/doc/quadbin/QUADBIN_POLYFILL.md b/clouds/snowflake/modules/doc/quadbin/QUADBIN_POLYFILL.md index 18f57a0e9..f8e54f0aa 100644 --- a/clouds/snowflake/modules/doc/quadbin/QUADBIN_POLYFILL.md +++ b/clouds/snowflake/modules/doc/quadbin/QUADBIN_POLYFILL.md @@ -6,7 +6,7 @@ QUADBIN_POLYFILL(geography, resolution) **Description** -Returns an array of Quadbins that intersect with the given geography at a given level of detail. +Returns an array of Quadbins that intersect with the given geography at a requested resolution. * `geography`: `GEOGRAPHY` geography to extract the Quadbins from. * `resolution`: `INT` level of detail or zoom. 
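For reference, a minimal sketch of the Snowflake polyfill call documented above (the `CARTO.CARTO` database/schema prefix and the sample polygon are assumptions used only for illustration):

```sql
-- Hypothetical usage sketch; assumes the functions are installed under CARTO.CARTO.
SELECT CARTO.CARTO.QUADBIN_POLYFILL(
    TO_GEOGRAPHY('POLYGON((-3.71 40.40, -3.69 40.40, -3.69 40.42, -3.71 40.42, -3.71 40.40))'),
    17  -- resolution (INT), level of detail or zoom
);
```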
diff --git a/clouds/snowflake/modules/doc/s2/S2_FROMGEOGPOINT.md b/clouds/snowflake/modules/doc/s2/S2_FROMGEOGPOINT.md index aeb25feae..16d3bf577 100644 --- a/clouds/snowflake/modules/doc/s2/S2_FROMGEOGPOINT.md +++ b/clouds/snowflake/modules/doc/s2/S2_FROMGEOGPOINT.md @@ -6,7 +6,7 @@ S2_FROMGEOGPOINT(point, resolution) **Description** -Returns the S2 cell ID of a given point at a given level of detail. +Returns the S2 cell ID of a given point at a requested resolution. * `point`: `GEOGRAPHY` point to get the ID from. * `resolution`: `INT` level of detail or zoom. diff --git a/clouds/snowflake/version b/clouds/snowflake/version index c813fe116..3c43790f5 100644 --- a/clouds/snowflake/version +++ b/clouds/snowflake/version @@ -1 +1 @@ -1.2.5 +1.2.6 From 94532b07a148ed025baf28190f89dd650688b785 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Tue, 5 Nov 2024 09:50:07 +0100 Subject: [PATCH 14/25] chore(sf): deploy snowflake in CARTO.CARTO when releasing (#536) --- .github/workflows/snowflake.yml | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/.github/workflows/snowflake.yml b/.github/workflows/snowflake.yml index f3ee08297..8a84d7d9f 100644 --- a/.github/workflows/snowflake.yml +++ b/.github/workflows/snowflake.yml @@ -88,6 +88,39 @@ jobs: cd clouds/snowflake make deploy diff="$GIT_DIFF" production=1 + deploy-internal-stable: + if: github.ref_name == 'stable' + needs: test + runs-on: ubuntu-20.04 + timeout-minutes: 20 + strategy: + matrix: + include: + - account: SF_ACCOUNT_CD + database: SF_DATABASE_STABLE_CD + user: SF_USER_CD + password: SF_PASSWORD_CD + role: SF_ROLE_CD + env: + SF_ACCOUNT: ${{ secrets[matrix.account] }} + SF_DATABASE: ${{ secrets[matrix.database] }} + SF_USER: ${{ secrets[matrix.user] }} + SF_PASSWORD: ${{ secrets[matrix.password] }} + SF_ROLE: ${{ secrets[matrix.role] }} + steps: + - name: Checkout repo + uses: actions/checkout@v2 + - name: Check diff + uses: technote-space/get-diff-action@v4 + - name: Setup node + uses: actions/setup-node@v1 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Run deploy + run: | + cd clouds/snowflake + make deploy diff="$GIT_DIFF" production=1 + deploy-internal-app: if: github.ref_name == 'main' needs: test From b5d0edd408bee1d983038e3c7c64d32053d51fb3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jes=C3=BAs=20Arroyo=20Torrens?= Date: Fri, 29 Nov 2024 12:04:29 +0100 Subject: [PATCH 15/25] chore: update gh actions versions (#537) --- .github/workflows/bigquery-ded.yml | 6 +- .github/workflows/bigquery.yml | 18 +++--- .github/workflows/databricks.yml | 10 +-- .github/workflows/draft-new-release.yml | 82 ------------------------- .github/workflows/postgres.yml | 6 +- .github/workflows/publish-release.yml | 1 - .github/workflows/redshift.yml | 6 +- .github/workflows/snowflake.yml | 6 +- 8 files changed, 26 insertions(+), 109 deletions(-) delete mode 100644 .github/workflows/draft-new-release.yml diff --git a/.github/workflows/bigquery-ded.yml b/.github/workflows/bigquery-ded.yml index a4eb69dd6..636dbeb61 100644 --- a/.github/workflows/bigquery-ded.yml +++ b/.github/workflows/bigquery-ded.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-20.04 timeout-minutes: 20 env: - GCLOUD_VERSION: 290.0.1 + GCLOUD_VERSION: 500.0.0 BQ_PROJECT: ${{ secrets.BQ_PROJECT_CD }} BQ_REGION: ${{ secrets.BQ_REGION_CD }} BQ_BUCKET: ${{ secrets.BQ_BUCKET_CD }} @@ -42,13 +42,13 @@ jobs: with: node-version: ${{ env.NODE_VERSION }} - name: Auth google - uses: 
google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.BQCARTOCD_DEPLOY_CLOUD_EXTENSIONS_SA_BASE64 }} project_id: ${{ env.BQ_PROJECT }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ env.BQ_PROJECT }} diff --git a/.github/workflows/bigquery.yml b/.github/workflows/bigquery.yml index 117dc49e2..ad315d205 100644 --- a/.github/workflows/bigquery.yml +++ b/.github/workflows/bigquery.yml @@ -18,7 +18,7 @@ env: NODE_VERSION: 14 PYTHON3_VERSION: 3.8.10 VIRTUALENV_VERSION: 20.21.1 - GCLOUD_VERSION: 290.0.1 + GCLOUD_VERSION: 500.0.0 jobs: @@ -40,13 +40,13 @@ jobs: with: node-version: ${{ env.NODE_VERSION }} - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.BQCARTOCI_DEPLOY_CLOUD_EXTENSIONS_SA_BASE64 }} project_id: ${{ env.BQ_PROJECT_CI }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ env.BQ_PROJECT_CI }} @@ -85,13 +85,13 @@ jobs: - name: Check diff uses: technote-space/get-diff-action@v4 - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.BQCARTOCD_DEPLOY_CLOUD_EXTENSIONS_SA_BASE64 }} project_id: ${{ env.BQ_PROJECT }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ env.BQ_PROJECT }} @@ -130,13 +130,13 @@ jobs: - name: Check diff uses: technote-space/get-diff-action@v4 - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.BQCARTO_DEPLOY_CLOUD_EXTENSIONS_SA_BASE64 }} project_id: ${{ matrix.project }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ matrix.project }} @@ -157,13 +157,13 @@ jobs: - name: Checkout repo uses: actions/checkout@v2 - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCLOUD_PRODUCTION_RELEASE_SA }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} diff --git a/.github/workflows/databricks.yml b/.github/workflows/databricks.yml index 90bcb5a19..23ad6904a 100644 --- a/.github/workflows/databricks.yml +++ b/.github/workflows/databricks.yml @@ -17,7 +17,7 @@ on: env: PYTHON3_VERSION: 3.8.11 VIRTUALENV_VERSION: 20.21.1 - GCLOUD_VERSION: 290.0.1 + GCLOUD_VERSION: 500.0.0 GOOGLE_SECRET_MANAGER_PROJECT: ${{ secrets.GOOGLE_SECRET_MANAGER_PROJECT }} jobs: @@ -35,7 +35,7 @@ jobs: - name: Checkout repo uses: actions/checkout@v3 - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.CARTODB_ON_GCP_TERRAFORM_CI_CD_SERVICE_ACCOUNT_KEY }} - name: Get secrets @@ -87,7 +87,7 @@ jobs: - name: Checkout repo uses: 
actions/checkout@v3 - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.CARTODB_ON_GCP_TERRAFORM_CI_CD_SERVICE_ACCOUNT_KEY }} - name: Get secrets @@ -138,13 +138,13 @@ jobs: - name: Setup virtualenv run: pip install virtualenv==${{ env.VIRTUALENV_VERSION }} - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCLOUD_PRODUCTION_RELEASE_SA }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} diff --git a/.github/workflows/draft-new-release.yml b/.github/workflows/draft-new-release.yml deleted file mode 100644 index 71a432201..000000000 --- a/.github/workflows/draft-new-release.yml +++ /dev/null @@ -1,82 +0,0 @@ -name: "Draft new release" - -on: - workflow_dispatch: - inputs: - version: - description: 'The version you want to release in format yyyyy-MM, for example 2022-10' - required: true - -jobs: - draft-new-release: - name: "Draft a new release" - runs-on: ubuntu-20.04 - steps: - - uses: actions/checkout@v3 - with: - token: ${{ secrets.CARTOFANTE_GITHUB_TOKEN }} - - name: Check if release tag exists - run: | - if [[ $(git ls-remote --tags origin refs/tags/${{ github.event.inputs.version }}) ]]; then - echo "## Release ${{ github.event.inputs.version }} already exists" - exit 1 - fi - - uses: dorny/paths-filter@v2 - id: changes - with: - filters: | - bigquery: - - 'clouds/bigquery/version' - databricks: - - 'clouds/databricks/version' - postgres: - - 'clouds/postgres/version' - redshift: - - 'clouds/redshift/version' - snowflake: - - 'clouds/snowflake/version' - base: stable - - name: Update bq changelog - if: steps.changes.outputs.bigquery == 'true' - run: | - BQ_VERSION=$(cat clouds/bigquery/version) - sed -i "s/\[Unreleased\]/\[${BQ_VERSION}\] - ${{ github.event.inputs.version }}/g" clouds/bigquery/CHANGELOG.md - - name: Update databricks changelog - if: steps.changes.outputs.databricks == 'true' - run: | - DB_VERSION=$(cat clouds/snowflake/version) - sed -i "s/\[Unreleased\]/\[${DB_VERSION}\] - ${{ github.event.inputs.version }}/g" clouds/databricks/CHANGELOG.md - - name: Update postgres changelog - if: steps.changes.outputs.postgres == 'true' - run: | - PG_VERSION=$(cat clouds/snowflake/version) - sed -i "s/\[Unreleased\]/\[${PG_VERSION}\] - ${{ github.event.inputs.version }}/g" clouds/postgres/CHANGELOG.md - - name: Update redshift changelog - if: steps.changes.outputs.redshift == 'true' - run: | - RS_VERSION=$(cat clouds/snowflake/version) - sed -i "s/\[Unreleased\]/\[${RS_VERSION}\] - ${{ github.event.inputs.version }}/g" clouds/redshift/CHANGELOG.md - - name: Update snowflake changelog - if: steps.changes.outputs.snowflake == 'true' - run: | - SF_VERSION=$(cat clouds/snowflake/version) - sed -i "s/\[Unreleased\]/\[${SF_VERSION}\] - ${{ github.event.inputs.version }}/g" clouds/snowflake/CHANGELOG.md - - uses: stefanzweifel/git-auto-commit-action@v4 - with: - commit_message: edit changelog versions - branch: release/${{ github.event.inputs.version }} - create_branch: true - commit_user_name: ${{ secrets.CARTOFANTE_USERNAME }} - commit_user_email: ${{ secrets.CARTOFANTE_EMAIL }} - - name: Create pull request - uses: repo-sync/pull-request@v2 - with: - source_branch: release/${{ 
github.event.inputs.version }} - destination_branch: stable - pr_title: Release version ${{ github.event.inputs.version }} - pr_reviewer: ${{ github.actor }} # By default, we request a review from the person who triggered the workflow. - pr_body: | - This PR was created in response to a manual trigger of the release workflow here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}. - Please, check that the version files and changelogs are correct. - This workflow will deploy the DW whose versions have been updated in dedicated environments - Merging this PR will create a GitHub release and publish the AT libraries those DW. diff --git a/.github/workflows/postgres.yml b/.github/workflows/postgres.yml index 8ec6e0a88..813c620d2 100644 --- a/.github/workflows/postgres.yml +++ b/.github/workflows/postgres.yml @@ -18,7 +18,7 @@ env: NODE_VERSION: 14 PYTHON3_VERSION: 3.8.10 VIRTUALENV_VERSION: 20.21.1 - GCLOUD_VERSION: 290.0.1 + GCLOUD_VERSION: 500.0.0 jobs: @@ -116,13 +116,13 @@ jobs: - name: Setup virtualenv run: pip install virtualenv==${{ env.VIRTUALENV_VERSION }} - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCLOUD_PRODUCTION_RELEASE_SA }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml index ca5cefc49..35d806c76 100644 --- a/.github/workflows/publish-release.yml +++ b/.github/workflows/publish-release.yml @@ -97,7 +97,6 @@ jobs: pr_title: Merge release into main branch pr_body: | This PR merges the release branch back into main. - This happens to ensure that the updates that happend on the stable branch. 
- name: Merge PR if possible continue-on-error: true env: diff --git a/.github/workflows/redshift.yml b/.github/workflows/redshift.yml index 35efbd419..5fbe9a858 100644 --- a/.github/workflows/redshift.yml +++ b/.github/workflows/redshift.yml @@ -19,7 +19,7 @@ env: PYTHON2_VERSION: 2.7.18 PYTHON3_VERSION: 3.8.10 VIRTUALENV_VERSION: 20.15.1 - GCLOUD_VERSION: 290.0.1 + GCLOUD_VERSION: 500.0.0 jobs: @@ -169,13 +169,13 @@ jobs: unzip awscliv2.zip ./aws/install - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCLOUD_PRODUCTION_RELEASE_SA }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} diff --git a/.github/workflows/snowflake.yml b/.github/workflows/snowflake.yml index 8a84d7d9f..456e76ea4 100644 --- a/.github/workflows/snowflake.yml +++ b/.github/workflows/snowflake.yml @@ -18,7 +18,7 @@ env: NODE_VERSION: 18.16 PYTHON3_VERSION: 3.8.18 VIRTUALENV_VERSION: 20.21.1 - GCLOUD_VERSION: 290.0.1 + GCLOUD_VERSION: 500.0.0 jobs: @@ -195,13 +195,13 @@ jobs: with: node-version: ${{ env.NODE_VERSION }} - name: Auth google - uses: google-github-actions/auth@v0 + uses: google-github-actions/auth@v2 with: credentials_json: ${{ secrets.GCLOUD_PRODUCTION_RELEASE_SA }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} create_credentials_file: true - name: Setup gcloud - uses: google-github-actions/setup-gcloud@v0 + uses: google-github-actions/setup-gcloud@v2 with: version: ${{ env.GCLOUD_VERSION }} project_id: ${{ secrets.GCLOUD_PRODUCTION_PROJECT }} From e604e835112027473bb547f757b21f0415977098 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Wed, 11 Dec 2024 16:25:16 +0100 Subject: [PATCH 16/25] chore(bq): add skip dependency tag in bigquery build_modules (#538) --- clouds/bigquery/common/build_modules.js | 1 + 1 file changed, 1 insertion(+) diff --git a/clouds/bigquery/common/build_modules.js b/clouds/bigquery/common/build_modules.js index e691541c1..e98af1770 100755 --- a/clouds/bigquery/common/build_modules.js +++ b/clouds/bigquery/common/build_modules.js @@ -163,6 +163,7 @@ function apply_replacements (text) { text = text.replace(pattern, process.env[replacement]); } } + text = text.replace(/@@SKIP_DEP@@/g, ''); return text; } From f1025dead72f4171753fc55bf7c7551f88f75cbb Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:19:48 +0100 Subject: [PATCH 17/25] docs(sf|h3): update h3_polyfill_table docs as it does not support points or lines (#539) --- .../snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md b/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md index 60009de19..2a8007d4d 100644 --- a/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md +++ b/clouds/snowflake/modules/doc/h3/H3_POLYFILL_TABLE.md @@ -6,15 +6,14 @@ H3_POLYFILL_TABLE(input_query, resolution, mode, output_table) **Description** -Returns a table with the H3 cell indexes contained in the given geography at a requested resolution. Containment is determined by the mode: center, intersects, contains. 
All the attributes except the geography will be included in the output table, clustered by the h3 column. +Returns a table with the H3 cell indexes contained in the given polygon at a requested resolution. Containment is determined by the mode: center, intersects, contains. All the attributes except the polygon will be included in the output table, clustered by the h3 column. * `input_query`: `STRING` input data to polyfill. It must contain a column `geom` with the shape to cover. Additionally, other columns can be included. -* `resolution`: `INT` level of detail. The value must be between 0 and 15 ([H3 resolution table](https://h3geo.org/docs/core-library/restable)). -* `mode`: `STRING` - * `center` returns the indexes of the H3 cells which centers intersect the input geography (polygon). The resulting H3 set does not fully cover the input geography, however, this is **significantly faster** that the other modes. This mode is not compatible with points or lines. Equivalent to [`H3_POLYFILL`](h3#h3_polyfill). - * `intersects` returns the indexes of the H3 cells that intersect the input geography. The resulting H3 set will completely cover the input geography (point, line, polygon). - * `contains` returns the indexes of the H3 cells that are entirely contained inside the input geography (polygon). This mode is not compatible with points or lines. -* `output_table`: `STRING` name of the output table to store the results of the polyfill. +* `resolution`: `INT` number between 0 and 15 with the [H3 resolution](https://h3geo.org/docs/core-library/restable). +* `mode`: `STRING` ``. Optional. Defaults to 'center' mode. + * `center` The center point of the H3 cell must be within the polygon + * `contains` The H3 cell must be fully contained within the polygon (least inclusive) + * `intersects` The H3 cell intersects in any way with the polygon (most inclusive) Mode `center`: From b5649a71c7d3b7d7b39dfac02b1388ccf6fd19c2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 09:56:23 +0100 Subject: [PATCH 18/25] chore(deps): bump jinja2 from 3.1.3 to 3.1.5 in /clouds/databricks/common (#540) --- clouds/databricks/common/requirements_create_it.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clouds/databricks/common/requirements_create_it.txt b/clouds/databricks/common/requirements_create_it.txt index cd1737d67..7cb100d62 100644 --- a/clouds/databricks/common/requirements_create_it.txt +++ b/clouds/databricks/common/requirements_create_it.txt @@ -1 +1 @@ -Jinja2==3.1.3 +Jinja2==3.1.5 From 14b2b5ccf2dd77d78468c44beb8345af77fa4453 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Thu, 23 Jan 2025 13:09:18 +0100 Subject: [PATCH 19/25] fix(bq|h3,quadbin): H3_POLYFILL and QUADBIN_POLYFILL functions not working with holes (#542) --- clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql | 5 ++++- clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql b/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql index c4c342476..581d39bc7 100644 --- a/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql +++ b/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql @@ -130,7 +130,10 @@ AS (( IF(resolution < 0 OR resolution > 15, ERROR('Invalid resolution, should be between 0 and 15'), ( WITH __bbox AS ( - SELECT ST_BOUNDINGBOX(geog) AS box + SELECT IF(ST_DIMENSION(geog) = 2, 
+ ST_BOUNDINGBOX(`@@BQ_DATASET@@.ST_ENVELOPE`(ARRAY[geog])), + ST_BOUNDINGBOX(geog) + ) AS box ), __params AS ( SELECT diff --git a/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql b/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql index 9372273b8..6d53728d8 100644 --- a/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql +++ b/clouds/bigquery/modules/sql/quadbin/QUADBIN_POLYFILL.sql @@ -7,7 +7,10 @@ CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.__QUADBIN_POLYFILL_INIT` RETURNS ARRAY AS (( WITH __bbox AS ( - SELECT ST_BOUNDINGBOX(geog) AS box + SELECT IF(ST_DIMENSION(geog) = 2, + ST_BOUNDINGBOX(`@@BQ_DATASET@@.ST_ENVELOPE`(ARRAY[geog])), + ST_BOUNDINGBOX(geog) + ) AS box ), __params AS ( SELECT From a77c45305fdac74e40ff819d707f7b111102eb93 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Fri, 24 Jan 2025 10:28:25 +0100 Subject: [PATCH 20/25] chores(bq): split JS libraries generation (#541) --- clouds/bigquery/Makefile | 6 +- clouds/bigquery/common/Makefile | 8 +- clouds/bigquery/common/build_modules.js | 20 + clouds/bigquery/common/list_libraries.js | 145 +++++ clouds/bigquery/common/rollup.config.js | 5 + clouds/bigquery/libraries/javascript/Makefile | 32 +- .../javascript/{src => libs}/accessors.js | 0 .../javascript/{src => libs}/clustering.js | 0 .../javascript/{src => libs}/constructors.js | 0 .../libraries/javascript/{src => libs}/h3.js | 0 .../libraries/javascript/libs/index.js | 25 - .../javascript/{src => libs}/measurements.js | 0 .../javascript/{src => libs}/placekey.js | 0 .../javascript/{src => libs}/processing.js | 0 .../libraries/javascript/libs/quadkey.js | 25 + .../libraries/javascript/libs/random.js | 8 + .../bigquery/libraries/javascript/libs/s2.js | 9 + .../{src => libs}/transformations.js | 0 .../libraries/javascript/src/quadkey.js | 335 +++++++++- .../libraries/javascript/src/quadkey/index.js | 310 --------- .../libraries/javascript/src/random.js | 16 +- .../bigquery/libraries/javascript/src/s2.js | 605 +++++++++++++++++- .../libraries/javascript/src/s2/index.js | 596 ----------------- .../javascript/test/accessors.test.js | 7 + .../javascript/test/clustering.test.js | 10 +- .../javascript/test/constructors.test.js | 6 + .../libraries/javascript/test/h3.test.js | 17 + .../libraries/javascript/test/index.test.js | 81 --- .../javascript/test/measurements.test.js | 9 + .../javascript/test/placekey.test.js | 7 + .../javascript/test/processing.test.js | 8 + .../libraries/javascript/test/quadkey.test.js | 63 +- .../libraries/javascript/test/random.test.js | 4 +- .../libraries/javascript/test/s2.test.js | 9 + .../javascript/test/transformations.test.js | 14 + clouds/bigquery/modules/Makefile | 4 +- .../modules/sql/accessors/ST_ENVELOPE.sql | 6 +- .../sql/clustering/ST_CLUSTERKMEANS.sql | 6 +- .../sql/constructors/ST_BEZIERSPLINE.sql | 4 +- .../sql/constructors/ST_MAKEELLIPSE.sql | 4 +- .../bigquery/modules/sql/h3/H3_BOUNDARY.sql | 6 +- clouds/bigquery/modules/sql/h3/H3_CENTER.sql | 6 +- clouds/bigquery/modules/sql/h3/H3_COMPACT.sql | 4 +- .../bigquery/modules/sql/h3/H3_DISTANCE.sql | 4 +- .../modules/sql/h3/H3_FROMLONGLAT.sql | 4 +- clouds/bigquery/modules/sql/h3/H3_HEXRING.sql | 6 +- .../bigquery/modules/sql/h3/H3_ISPENTAGON.sql | 4 +- clouds/bigquery/modules/sql/h3/H3_ISVALID.sql | 4 +- clouds/bigquery/modules/sql/h3/H3_KRING.sql | 6 +- .../modules/sql/h3/H3_KRING_DISTANCES.sql | 6 +- .../bigquery/modules/sql/h3/H3_POLYFILL.sql | 22 +- .../bigquery/modules/sql/h3/H3_RESOLUTION.sql | 6 +- 
.../bigquery/modules/sql/h3/H3_TOCHILDREN.sql | 6 +- .../bigquery/modules/sql/h3/H3_TOPARENT.sql | 6 +- .../bigquery/modules/sql/h3/H3_UNCOMPACT.sql | 4 +- .../modules/sql/measurements/ST_ANGLE.sql | 4 +- .../sql/measurements/ST_MINKOWSKIDISTANCE.sql | 6 +- .../modules/sql/placekey/PLACEKEY_FROMH3.sql | 4 +- .../modules/sql/placekey/PLACEKEY_ISVALID.sql | 4 +- .../modules/sql/placekey/PLACEKEY_TOH3.sql | 6 +- .../sql/processing/__VORONOIHELPER.sql | 8 +- .../sql/quadkey/QUADINT_FROMLONGLAT.sql | 4 +- .../quadkey/QUADINT_FROMLONGLAT_ZOOMRANGE.sql | 6 +- .../sql/quadkey/QUADINT_FROMQUADKEY.sql | 4 +- .../modules/sql/quadkey/QUADINT_POLYFILL.sql | 6 +- .../sql/quadkey/QUADINT_TOCHILDREN.sql | 4 +- .../modules/sql/quadkey/QUADINT_TOQUADKEY.sql | 4 +- .../modules/sql/random/ST_GENERATEPOINTS.sql | 4 +- .../bigquery/modules/sql/s2/S2_BOUNDARY.sql | 4 +- clouds/bigquery/modules/sql/s2/S2_CENTER.sql | 4 +- .../modules/sql/s2/S2_FROMHILBERTQUADKEY.sql | 4 +- .../modules/sql/s2/S2_FROMLONGLAT.sql | 6 +- .../modules/sql/s2/S2_TOHILBERTQUADKEY.sql | 4 +- .../modules/sql/transformations/ST_BUFFER.sql | 4 +- .../sql/transformations/ST_CENTERMEAN.sql | 4 +- .../sql/transformations/ST_CENTERMEDIAN.sql | 4 +- .../sql/transformations/ST_CENTEROFMASS.sql | 4 +- .../sql/transformations/ST_CONCAVEHULL.sql | 14 +- .../sql/transformations/ST_DESTINATION.sql | 4 +- .../sql/transformations/ST_GREATCIRCLE.sql | 4 +- .../ST_LINE_INTERPOLATE_POINT.sql | 4 +- .../sql/transformations/ST_POINTONSURFACE.sql | 4 +- 82 files changed, 1400 insertions(+), 1230 deletions(-) create mode 100755 clouds/bigquery/common/list_libraries.js rename clouds/bigquery/libraries/javascript/{src => libs}/accessors.js (100%) rename clouds/bigquery/libraries/javascript/{src => libs}/clustering.js (100%) rename clouds/bigquery/libraries/javascript/{src => libs}/constructors.js (100%) rename clouds/bigquery/libraries/javascript/{src => libs}/h3.js (100%) delete mode 100644 clouds/bigquery/libraries/javascript/libs/index.js rename clouds/bigquery/libraries/javascript/{src => libs}/measurements.js (100%) rename clouds/bigquery/libraries/javascript/{src => libs}/placekey.js (100%) rename clouds/bigquery/libraries/javascript/{src => libs}/processing.js (100%) create mode 100644 clouds/bigquery/libraries/javascript/libs/quadkey.js create mode 100644 clouds/bigquery/libraries/javascript/libs/random.js create mode 100644 clouds/bigquery/libraries/javascript/libs/s2.js rename clouds/bigquery/libraries/javascript/{src => libs}/transformations.js (100%) delete mode 100644 clouds/bigquery/libraries/javascript/src/quadkey/index.js delete mode 100644 clouds/bigquery/libraries/javascript/src/s2/index.js create mode 100644 clouds/bigquery/libraries/javascript/test/accessors.test.js create mode 100644 clouds/bigquery/libraries/javascript/test/constructors.test.js create mode 100644 clouds/bigquery/libraries/javascript/test/h3.test.js delete mode 100644 clouds/bigquery/libraries/javascript/test/index.test.js create mode 100644 clouds/bigquery/libraries/javascript/test/measurements.test.js create mode 100644 clouds/bigquery/libraries/javascript/test/placekey.test.js create mode 100644 clouds/bigquery/libraries/javascript/test/processing.test.js create mode 100644 clouds/bigquery/libraries/javascript/test/s2.test.js create mode 100644 clouds/bigquery/libraries/javascript/test/transformations.test.js diff --git a/clouds/bigquery/Makefile b/clouds/bigquery/Makefile index 5310196a7..03d43e064 100644 --- a/clouds/bigquery/Makefile +++ b/clouds/bigquery/Makefile @@ 
-4,6 +4,7 @@ ROOT_DIR := $(shell dirname $(abspath $(lastword $(MAKEFILE_LIST)))) DIST_DIR ?= $(ROOT_DIR)/dist BUILD_DIR ?= $(ROOT_DIR)/build +MODULES_DIRS ?= $(ROOT_DIR)/modules ESLINTRC_DIR ?= $(ROOT_DIR)/../.. COMMON_DIR = $(ROOT_DIR)/common PACKAGE_VERSION ?= $(shell cat $(ROOT_DIR)/version) @@ -49,7 +50,10 @@ build: build-libraries: mkdir -p $(BUILD_DIR)/libs $(MAKE) -C libraries/javascript build - cp libraries/javascript/build/index.js $(BUILD_DIR)/libs/$(BQ_LIBRARY_DEFAULT).js + $(COMMON_DIR)/list_libraries.js $(MODULES_DIRS) --diff="$(diff)" --modules=$(modules) --functions=$(functions) --nodeps=$(nodeps) --makelib=$(MAKE_LIB) 1>/dev/null # Check errors + for f in `$(COMMON_DIR)/list_libraries.js $(MODULES_DIRS) --diff="$(diff)" --modules=$(modules) --functions=$(functions) --nodeps=$(nodeps) --makelib=$(MAKE_LIB)`; do \ + cp libraries/javascript/build/$${f}.js $(BUILD_DIR)/libs/$(BQ_LIBRARY_DEFAULT)_$${f}.js; \ + done ifdef MAKE_LIB cp libraries/javascript/build/index_$(MAKE_LIB).js $(BUILD_DIR)/libs/$(BQ_LIBRARY_DEFAULT)_$(MAKE_LIB).js endif diff --git a/clouds/bigquery/common/Makefile b/clouds/bigquery/common/Makefile index 57e536739..6cd71dfe4 100644 --- a/clouds/bigquery/common/Makefile +++ b/clouds/bigquery/common/Makefile @@ -10,11 +10,11 @@ BQ_DATASET_DEFAULT = carto BQ_LIBRARY_DEFAULT ?= carto_analytics_toolbox_core ifeq ($(production),1) -export BQ_LIBRARY_BUCKET = $(BQ_BUCKET)/$(BQ_DATASET_DEFAULT)/libs/$(BQ_LIBRARY_DEFAULT).js -export BQ_LIBRARY_TILER_BUCKET = $(BQ_BUCKET)/$(BQ_DATASET_DEFAULT)/libs/$(BQ_LIBRARY_DEFAULT)_tiler.js +export BQ_LIBRARY_BUCKET = $(BQ_BUCKET)/$(BQ_DATASET_DEFAULT)/libs/$(BQ_LIBRARY_DEFAULT) +export BQ_LIBRARY_TILER_BUCKET = $(BQ_LIBRARY_BUCKET)_tiler.js else -export BQ_LIBRARY_BUCKET = $(BQ_BUCKET)/$(BQ_PREFIX)$(BQ_DATASET_DEFAULT)/libs/$(BQ_LIBRARY_DEFAULT).js -export BQ_LIBRARY_TILER_BUCKET = $(BQ_BUCKET)/$(BQ_PREFIX)$(BQ_DATASET_DEFAULT)/libs/$(BQ_LIBRARY_DEFAULT)_tiler.js +export BQ_LIBRARY_BUCKET = $(BQ_BUCKET)/$(BQ_PREFIX)$(BQ_DATASET_DEFAULT)/libs/$(BQ_LIBRARY_DEFAULT) +export BQ_LIBRARY_TILER_BUCKET = $(BQ_LIBRARY_BUCKET)_tiler.js endif diff --git a/clouds/bigquery/common/build_modules.js b/clouds/bigquery/common/build_modules.js index e98af1770..41f6c3bc7 100755 --- a/clouds/bigquery/common/build_modules.js +++ b/clouds/bigquery/common/build_modules.js @@ -14,8 +14,11 @@ const argv = require('minimist')(process.argv.slice(2)); const inputDirs = argv._[0] && argv._[0].split(','); const outputDir = argv.output || 'build'; +const libsBuildDir = argv.libs_build_dir || '../libraries/javascript/build'; const diff = argv.diff || []; const nodeps = argv.nodeps; +const libraryBucket = argv.librarybucket; +const makelib = argv.makelib; let modulesFilter = (argv.modules && argv.modules.split(',')) || []; let functionsFilter = (argv.functions && argv.functions.split(',')) || []; let all = !(diff.length || modulesFilter.length || functionsFilter.length); @@ -156,6 +159,23 @@ if (argv.production) { let content = output.map(f => f.content).join(separator); function apply_replacements (text) { + const libraries = [... 
new Set(text.match(new RegExp('@@BQ_LIBRARY_.*_BUCKET@@', 'g')))]; + for (let library of libraries) { + let libraryName = library.replace('@@BQ_LIBRARY_', '').replace('_BUCKET@@', '').toLowerCase(); + if (makelib == libraryName) { + continue; + } + libraryName += '.js'; + const libraryPath = path.join(libsBuildDir, libraryName); + if (fs.existsSync(libraryPath)) { + const libraryBucketPath = libraryBucket + '_' + libraryName; + text = text.replace(new RegExp(library, 'g'), libraryBucketPath); + } + else { + console.log(`Warning: library "${libraryName}" does not exist. Run "make build-libraries" with the same filters.`); + process.exit(1); + } + } const replacements = process.env.REPLACEMENTS.split(' '); for (let replacement of replacements) { if (replacement) { diff --git a/clouds/bigquery/common/list_libraries.js b/clouds/bigquery/common/list_libraries.js new file mode 100755 index 000000000..878c35e17 --- /dev/null +++ b/clouds/bigquery/common/list_libraries.js @@ -0,0 +1,145 @@ +#!/usr/bin/env node + +// List the JavaScript libraries based on the input filters to the SQL functions + + +// ./build_modules.js modules --output=build --diff="clouds/bigquery/modules/sql/quadbin/QUADBIN_TOZXY.sql" +// ./build_modules.js modules --output=build --functions=ST_TILEENVELOPE +// ./build_modules.js modules --output=build --modules=quadbin +// ./build_modules.js modules --output=build --production --dropfirst + +const fs = require('fs'); +const path = require('path'); +const argv = require('minimist')(process.argv.slice(2)); + +const inputDirs = argv._[0] && argv._[0].split(','); +const diff = argv.diff || []; +const nodeps = argv.nodeps; +const makelib = argv.makelib; +let modulesFilter = (argv.modules && argv.modules.split(',')) || []; +let functionsFilter = (argv.functions && argv.functions.split(',')) || []; +let all = !(diff.length || modulesFilter.length || functionsFilter.length); + +// Convert diff to modules/functions +if (diff.length) { + const patternsAll = [ + /\.github\/workflows\/bigquery\.yml/, + /clouds\/bigquery\/common\/.+/, + /clouds\/bigquery\/libraries\/.+/, + /clouds\/bigquery\/.*Makefile/, + /clouds\/bigquery\/version/ + ]; + const patternModulesSql = /clouds\/bigquery\/modules\/sql\/([^\s]*?)\//g; + const patternModulesTest = /clouds\/bigquery\/modules\/test\/([^\s]*?)\//g; + const diffAll = patternsAll.some(p => diff.match(p)); + if (diffAll) { + all = diffAll; + } else { + const modulesSql = [...diff.matchAll(patternModulesSql)].map(m => m[1]); + const modulesTest = [...diff.matchAll(patternModulesTest)].map(m => m[1]); + const diffModulesFilter = [...new Set(modulesSql.concat(modulesTest))]; + if (diffModulesFilter) { + modulesFilter = diffModulesFilter; + } + } +} + +// Extract functions +const functions = []; +for (let inputDir of inputDirs) { + const sqldir = path.join(inputDir, 'sql'); + const modules = fs.readdirSync(sqldir); + modules.forEach(module => { + const moduledir = path.join(sqldir, module); + if (fs.statSync(moduledir).isDirectory()) { + const files = fs.readdirSync(moduledir); + files.forEach(file => { + if (file.endsWith('.sql')) { + const name = path.parse(file).name; + const content = fs.readFileSync(path.join(moduledir, file)).toString().replace(/--.*\n/g, ''); + functions.push({ + name, + module, + content, + dependencies: [] + }); + } + }); + } + }); +} + +// Check filters +modulesFilter.forEach(m => { + if (!functions.map(fn => fn.module).includes(m)) { + process.stderr.write(`ERROR: Module not found ${m}\n`); + process.exit(1); + } +}); 
+functionsFilter.forEach(f => { + if (!functions.map(fn => fn.name).includes(f)) { + process.stderr.write(`ERROR: Function not found ${f}`); + process.exit(1); + } +}); + +// Extract function dependencies +if (!nodeps) { + functions.forEach(mainFunction => { + functions.forEach(depFunction => { + if (mainFunction.name != depFunction.name) { + const depFunctionMatches = []; + depFunctionMatches.push(...depFunction.content.replace(/(\r\n|\n|\r)/gm,' ').matchAll(new RegExp('(?<=(? { + let qualifiedDepFunctName = depFunctionMatch[0].replace(/[ \p{Diacritic}]/gu, '').split('(')[0]; + qualifiedDepFunctName = qualifiedDepFunctName.split('.'); + depFunctionNames.push(qualifiedDepFunctName[qualifiedDepFunctName.length - 1]); + }) + if (depFunctionNames.some((depFunctionName) => mainFunction.content.includes(`DATASET@@.${depFunctionName}\`(`))) { + mainFunction.dependencies.push(depFunction.name); + } + } + }); + }); +} + +// Check circular dependencies +functions.forEach(mainFunction => { + functions.forEach(depFunction => { + if (mainFunction.dependencies.includes(depFunction.name) && + depFunction.dependencies.includes(mainFunction.name)) { + process.stderr.write(`ERROR: Circular dependency between ${mainFunction.name} and ${depFunction.name}`); + process.exit(1); + } + }); +}); + + +// Filter functions +const output = []; +function add (f, include) { + include = include || all || functionsFilter.includes(f.name) || modulesFilter.includes(f.module); + for (const dependency of f.dependencies) { + add(functions.find(f => f.name === dependency), include); + } + if (!output.map(f => f.name).includes(f.name) && include) { + output.push({ + name: f.name, + content: f.content + }); + } +} +functions.forEach(f => add(f)); + +const content = output.map(f => f.content).join('\n'); +let libraries = [... new Set(content.match(new RegExp('@@BQ_LIBRARY_.*_BUCKET@@', 'g')))] + .map(l => l.replace('@@BQ_LIBRARY_', '').replace('_BUCKET@@', '').toLowerCase()); + +// Exclude libraries pointed by makelib as they are deployed separately +if (makelib) { + libraries = libraries.filter(l => l !== makelib); +} + +process.stdout.write(libraries.join(' ')); \ No newline at end of file diff --git a/clouds/bigquery/common/rollup.config.js b/clouds/bigquery/common/rollup.config.js index a0af9d73e..d23eb64df 100644 --- a/clouds/bigquery/common/rollup.config.js +++ b/clouds/bigquery/common/rollup.config.js @@ -18,6 +18,11 @@ for (let dir of dirs) { } } +if (!input && filename) { + console.log(`Error: library "${filename}" does not exist. 
Add it or revisit the replacement "@@BQ_LIBRARY_${path.parse(filename).name.toUpperCase()}@@" in one of your sql files.`); + process.exit(1); +} + // Format library name to camel case const name = process.env.NAME.replace(/(_\w)/g, k => k[1].toUpperCase()); diff --git a/clouds/bigquery/libraries/javascript/Makefile b/clouds/bigquery/libraries/javascript/Makefile index f39502fca..5dad8f37b 100644 --- a/clouds/bigquery/libraries/javascript/Makefile +++ b/clouds/bigquery/libraries/javascript/Makefile @@ -45,12 +45,27 @@ ifdef MAKE_LIB endif build-libs: build-install $(NODE_MODULES_DEV) - NAME=lib \ - PATH="$(NODE_MODULES_DEV)/.bin/:$(PATH)" \ - DIRS=$(LIBS_DIR) \ - FILENAME=index.js \ - OUTPUT=$(BUILD_DIR)/index.js \ - rollup --config $(COMMON_DIR)/rollup.config.js $(BUILD_PARAMS); \ +ifdef UNIT_TEST + $(COMMON_DIR)/list_libraries.js $(MODULES_DIR) --all --makelib=$(MAKE_LIB) 1>/dev/null # Check errors + for f in `$(COMMON_DIR)/list_libraries.js $(MODULES_DIR) --all --makelib=$(MAKE_LIB)`; do \ + NAME=$${f}Lib \ + PATH="$(NODE_MODULES_DEV)/.bin/:$(PATH)" \ + DIRS=$(LIBS_DIR) \ + FILENAME=$${f}.js \ + OUTPUT=$(BUILD_DIR)/$${f}.js \ + rollup --config $(COMMON_DIR)/rollup.config.js $(BUILD_PARAMS); \ + done +else + $(COMMON_DIR)/list_libraries.js $(MODULES_DIRS) --diff="$(diff)" --modules=$(modules) --functions=$(functions) --nodeps=$(nodeps) --makelib=$(MAKE_LIB) 1>/dev/null # Check errors + for f in `$(COMMON_DIR)/list_libraries.js $(MODULES_DIRS) --diff="$(diff)" --modules=$(modules) --functions=$(functions) --nodeps=$(nodeps) --makelib=$(MAKE_LIB)`; do \ + NAME=$${f}Lib \ + PATH="$(NODE_MODULES_DEV)/.bin/:$(PATH)" \ + DIRS=$(LIBS_DIRS) \ + FILENAME=$${f}.js \ + OUTPUT=$(BUILD_DIR)/$${f}.js \ + rollup --config $(COMMON_DIR)/rollup.config.js $(BUILD_PARAMS); \ + done +endif build-install: for d in $(shell echo $(LIBS_DIRS) | tr "," "\n"); do \ @@ -59,7 +74,10 @@ build-install: deploy: check build echo "Deploying libraries..." - $(GSUTIL) cp -r $(BUILD_DIR)/index.js $(BQ_LIBRARY_BUCKET) + $(COMMON_DIR)/list_libraries.js $(MODULES_DIRS) --diff="$(diff)" --modules=$(modules) --functions=$(functions) --nodeps=$(nodeps) --makelib=$(MAKE_LIB) 1>/dev/null # Check errors + for f in `$(COMMON_DIR)/list_libraries.js $(MODULES_DIRS) --diff="$(diff)" --modules=$(modules) --functions=$(functions) --nodeps=$(nodeps) --makelib=$(MAKE_LIB)`; do \ + $(GSUTIL) cp -r $(BUILD_DIR)/$${f}.js $(BQ_LIBRARY_BUCKET)_$${f}.js; \ + done # Provisional for WASM version of tiler ifdef MAKE_LIB echo "Deploying tiler library..." 
diff --git a/clouds/bigquery/libraries/javascript/src/accessors.js b/clouds/bigquery/libraries/javascript/libs/accessors.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/accessors.js rename to clouds/bigquery/libraries/javascript/libs/accessors.js diff --git a/clouds/bigquery/libraries/javascript/src/clustering.js b/clouds/bigquery/libraries/javascript/libs/clustering.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/clustering.js rename to clouds/bigquery/libraries/javascript/libs/clustering.js diff --git a/clouds/bigquery/libraries/javascript/src/constructors.js b/clouds/bigquery/libraries/javascript/libs/constructors.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/constructors.js rename to clouds/bigquery/libraries/javascript/libs/constructors.js diff --git a/clouds/bigquery/libraries/javascript/src/h3.js b/clouds/bigquery/libraries/javascript/libs/h3.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/h3.js rename to clouds/bigquery/libraries/javascript/libs/h3.js diff --git a/clouds/bigquery/libraries/javascript/libs/index.js b/clouds/bigquery/libraries/javascript/libs/index.js deleted file mode 100644 index e96a72888..000000000 --- a/clouds/bigquery/libraries/javascript/libs/index.js +++ /dev/null @@ -1,25 +0,0 @@ -import accessors from '../src/accessors'; -import constructors from '../src/constructors'; -import measurements from '../src/measurements'; -import quadkey from '../src/quadkey'; -import s2 from '../src/s2'; -import processing from '../src/processing'; -import transformations from '../src/transformations'; -import h3 from '../src/h3'; -import placekey from '../src/placekey'; -import clustering from '../src/clustering'; -import random from '../src/random'; - -export default { - accessors, - constructors, - measurements, - quadkey, - s2, - processing, - transformations, - h3, - placekey, - clustering, - random -}; \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/src/measurements.js b/clouds/bigquery/libraries/javascript/libs/measurements.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/measurements.js rename to clouds/bigquery/libraries/javascript/libs/measurements.js diff --git a/clouds/bigquery/libraries/javascript/src/placekey.js b/clouds/bigquery/libraries/javascript/libs/placekey.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/placekey.js rename to clouds/bigquery/libraries/javascript/libs/placekey.js diff --git a/clouds/bigquery/libraries/javascript/src/processing.js b/clouds/bigquery/libraries/javascript/libs/processing.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/processing.js rename to clouds/bigquery/libraries/javascript/libs/processing.js diff --git a/clouds/bigquery/libraries/javascript/libs/quadkey.js b/clouds/bigquery/libraries/javascript/libs/quadkey.js new file mode 100644 index 000000000..51eb55b18 --- /dev/null +++ b/clouds/bigquery/libraries/javascript/libs/quadkey.js @@ -0,0 +1,25 @@ +import { + bbox, + toParent, + toChildren, + quadkeyFromQuadint, + quadintFromQuadkey, + quadintFromLocation, + quadintToGeoJSON, + quadintFromZXY, + geojsonToQuadints, + ZXYFromQuadint +} from '../src/quadkey'; + +export default { + bbox, + toParent, + toChildren, + quadkeyFromQuadint, + quadintFromQuadkey, + quadintFromLocation, + quadintToGeoJSON, + quadintFromZXY, + geojsonToQuadints, + ZXYFromQuadint +}; \ No newline at end of file diff 
--git a/clouds/bigquery/libraries/javascript/libs/random.js b/clouds/bigquery/libraries/javascript/libs/random.js new file mode 100644 index 000000000..70dcd1cc6 --- /dev/null +++ b/clouds/bigquery/libraries/javascript/libs/random.js @@ -0,0 +1,8 @@ +import { + generateRandomPointsInPolygon + +} from '../src/random'; + +export default { + generateRandomPointsInPolygon +}; \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/libs/s2.js b/clouds/bigquery/libraries/javascript/libs/s2.js new file mode 100644 index 000000000..229bb13f1 --- /dev/null +++ b/clouds/bigquery/libraries/javascript/libs/s2.js @@ -0,0 +1,9 @@ +import { S2 } from '../src/s2'; + +export default { + keyToId: S2.keyToId, + idToKey: S2.idToKey, + latLngToKey: S2.latLngToKey, + FromHilbertQuadKey: S2.S2Cell.FromHilbertQuadKey, + idToLatLng: S2.idToLatLng +}; \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/src/transformations.js b/clouds/bigquery/libraries/javascript/libs/transformations.js similarity index 100% rename from clouds/bigquery/libraries/javascript/src/transformations.js rename to clouds/bigquery/libraries/javascript/libs/transformations.js diff --git a/clouds/bigquery/libraries/javascript/src/quadkey.js b/clouds/bigquery/libraries/javascript/src/quadkey.js index f74df5b5c..8ea5e5d7e 100644 --- a/clouds/bigquery/libraries/javascript/src/quadkey.js +++ b/clouds/bigquery/libraries/javascript/src/quadkey.js @@ -1,25 +1,310 @@ -import { - bbox, - toParent, - toChildren, - quadkeyFromQuadint, - quadintFromQuadkey, - quadintFromLocation, - quadintToGeoJSON, - quadintFromZXY, - geojsonToQuadints, - ZXYFromQuadint -} from './quadkey/index'; - -export default { - bbox, - toParent, - toChildren, - quadkeyFromQuadint, - quadintFromQuadkey, - quadintFromLocation, - quadintToGeoJSON, - quadintFromZXY, - geojsonToQuadints, - ZXYFromQuadint -}; \ No newline at end of file +// ---------------------------- +// -- Copyright (C) 2021 CARTO +// ---------------------------- + +import tilebelt from '@mapbox/tilebelt'; +import tilecover from '@mapbox/tile-cover'; + +/** + * convert tile coordinates to quadint at specific zoom level + * @param {zxycoord} zxy zoom and tile coordinates + * @return {int} quadint for input tile coordinates at input zoom level + */ +export function quadintFromZXY (z, x, y) { + if (z < 0 || z > 29) { + throw new Error('Wrong zoom'); + } + const zI = z; + if (zI <= 13) { + let quadint = y; + quadint <<= zI; + quadint |= x; + quadint <<= 5; + quadint |= zI; + return quadint; + } + let quadint = BigInt(y); + quadint <<= BigInt(z); + quadint |= BigInt(x); + quadint <<= BigInt(5); + quadint |= BigInt(z); + return quadint; +} + +/** + * convert quadint to tile coordinates and level of zoom + * @param {int} quadint quadint to be converted + * @return {zxycoord} level of zoom and tile coordinates + */ +export function ZXYFromQuadint (quadint) { + const quadintBig = BigInt(quadint); + const z = quadintBig & BigInt(0x1F); + if (z <= 13n) { + const zNumber = Number(z); + const x = (quadint >> 5) & ((1 << zNumber) - 1); + const y = quadint >> (zNumber + 5); + return { z: zNumber, x: x, y: y }; + } + const x = (quadintBig >> (5n)) & ((1n << z) - 1n); + const y = quadintBig >> (5n + z); + return { z: Number(z), x: Number(x), y: Number(y) }; +} + +/** + * get quadint for location at specific zoom level + * @param {geocoord} location location coordinates to convert to quadint + * @param {number} zoom map zoom level of quadint to return + * @return {string} quadint the 
input location resides in for the input zoom level + */ +export function quadintFromLocation (long, lat, zoom) { + if (zoom < 0 || zoom > 29) { + throw new Error('Wrong zoom'); + } + lat = clipNumber(lat, -85.05, 85.05); + const tile = tilebelt.pointToTile(long, lat, zoom); + return quadintFromZXY(zoom, tile[0], tile[1]); +} + +/** + * convert quadkey into a quadint + * @param {string} quadkey quadkey to be converted + * @return {int} quadint + */ +export function quadintFromQuadkey (quadkey) { + const z = quadkey.length; + const tile = tilebelt.quadkeyToTile(quadkey); + return quadintFromZXY(z, tile[0], tile[1]); +} + +/** + * convert quadint into a quadkey + * @param {int} quadint quadint to be converted + * @return {string} quadkey + */ +export function quadkeyFromQuadint (quadint) { + const tile = ZXYFromQuadint(quadint); + return tilebelt.tileToQuadkey([tile.x, tile.y, tile.z]); +} + +/** + * get the bounding box for a quadint in location coordinates + * @param {int} quadint quadint to get bounding box from + * @return {bbox} bounding box for the input quadint + */ +export function bbox (quadint) { + const tile = ZXYFromQuadint(quadint); + return tilebelt.tileToBBOX([tile.x, tile.y, tile.z]); +} + +/** + * get the GeoJSON with the bounding box for a quadint in location coordinates + * @param {int} quadint quadint to get bounding box from + * @return {GeoJSON} GeoJSON with the bounding box for the input quadint + */ +export function quadintToGeoJSON (quadint) { + const tile = ZXYFromQuadint(quadint); + return tilebelt.tileToGeoJSON([tile.x, tile.y, tile.z]); +} + +/** + * returns the sibling of the given quadint and will wrap + * @param {int} quadint key to get sibling of + * @param {string} direction direction of sibling from key + * @return {int} sibling key + */ +export function sibling (quadint, direction) { + direction = direction.toLowerCase(); + if (direction !== 'left' && direction !== 'right' && direction !== 'up' && direction !== 'down') { + throw new Error('Wrong direction argument passed to sibling'); + } + if (direction === 'left') { + return siblingLeft(quadint); + } + if (direction === 'right') { + return siblingRight(quadint); + } + if (direction === 'up') { + return siblingUp(quadint); + } + if (direction === 'down') { + return siblingDown(quadint); + } +} + +/** + * returns the sibling of the given quadint and will wrap + * @param {int} quadint key to get sibling of + * @param {string} direction direction of sibling from key + * @return {int} sibling key + */ +export function siblingLeft (quadint) { + const tile = ZXYFromQuadint(quadint); + const tilesPerLevel = 2 << (tile.z - 1); + const x = tile.x > 0 ? tile.x - 1 : tilesPerLevel - 1; + return quadintFromZXY(tile.z, x, tile.y); +} + +/** + * returns the sibling of the given quadint and will wrap + * @param {int} quadint key to get sibling of + * @param {string} direction direction of sibling from key + * @return {int} sibling key + */ +export function siblingRight (quadint) { + const tile = ZXYFromQuadint(quadint); + const tilesPerLevel = 2 << (tile.z - 1); + const x = tile.x < tilesPerLevel - 1 ? tile.x + 1 : 0; + return quadintFromZXY(tile.z, x, tile.y); +} + +/** + * returns the sibling of the given quadint and will wrap + * @param {int} quadint key to get sibling of + * @param {string} direction direction of sibling from key + * @return {int} sibling key + */ +export function siblingUp (quadint) { + const tile = ZXYFromQuadint(quadint); + const tilesPerLevel = 2 << (tile.z - 1); + const y = tile.y > 0 ? 
tile.y - 1 : tilesPerLevel - 1; + return quadintFromZXY(tile.z, tile.x, y); +} + +/** + * returns the sibling of the given quadint and will wrap + * @param {int} quadint key to get sibling of + * @param {string} direction direction of sibling from key + * @return {int} sibling key + */ +export function siblingDown (quadint) { + const tile = ZXYFromQuadint(quadint); + const tilesPerLevel = 2 << (tile.z - 1); + const y = tile.y < tilesPerLevel - 1 ? tile.y + 1 : 0; + return quadintFromZXY(tile.z, tile.x, y); +} + +/** + * get all the children quadints of a quadint + * @param {int} quadint quadint to get the children of + * @param {int} resolution resolution of the desired children + * @return {array} array of quadints representing the children of the input quadint + */ +export function toChildren (quadint, resolution) { + const zxy = ZXYFromQuadint(quadint); + if (zxy.z < 0 || zxy.z > 28) { + throw new Error('Wrong quadint zoom'); + } + + if (resolution < 0 || resolution <= zxy.z) { + throw new Error('Wrong resolution'); + } + const diffZ = resolution - zxy.z; + const mask = (1 << diffZ) - 1; + const minTileX = zxy.x << diffZ; + const maxTileX = minTileX | mask; + const minTileY = zxy.y << diffZ; + const maxTileY = minTileY | mask; + const children = []; + let x, y; + for (x = minTileX; x <= maxTileX; x++) { + for (y = minTileY; y <= maxTileY; y++) { + children.push(quadintFromZXY(resolution, x, y)); + } + } + return children; +} + +/** + * get the parent of a quadint + * @param {int} quadint quadint to get the parent of + * @param {int} resolution resolution of the desired parent + * @return {int} parent of the input quadint + */ +export function toParent (quadint, resolution) { + const zxy = ZXYFromQuadint(quadint); + if (zxy.z < 1 || zxy.z > 29) { + throw new Error('Wrong quadint zoom'); + } + if (resolution < 0 || resolution >= zxy.z) { + throw new Error('Wrong resolution'); + } + return quadintFromZXY(resolution, zxy.x >> (zxy.z - resolution), zxy.y >> (zxy.z - resolution)); +} + +/** + * get the kring of a quadint + * @param {int} origin quadint to get the kring of + * @param {int} size in tiles of the desired kring + * @return {int} kring of the input quadint + */ +export function kRing (origin, size) { + if (size === 0) { + return [origin.toString()]; + } + + let i, j; + let cornerQuadint = origin; + // Traverse to top left corner + for (i = 0; i < size; i++) { + cornerQuadint = siblingLeft(cornerQuadint); + cornerQuadint = siblingUp(cornerQuadint) + } + + const neighbors = []; + let traversalQuadint; + for (j = 0; j < size * 2 + 1; j++) { + traversalQuadint = cornerQuadint; + for (i = 0; i < size * 2 + 1; i++) { + neighbors.push(traversalQuadint.toString()); + traversalQuadint = siblingRight(traversalQuadint); + } + cornerQuadint = siblingDown(cornerQuadint) + } + return neighbors; +} + +/** + * get the kring distances of a quadint + * @param {int} origin quadint to get the kring of + * @param {int} size in tiles of the desired kring + * @return {int} kring distances of the input quadint + */ +export function kRingDistances (origin, size) { + if (size === 0) { + return [{ index: origin.toString(), distance: 0 }]; + } + + let cornerQuadint = origin; + // Traverse to top left corner + for (let i = 0; i < size; i++) { + cornerQuadint = siblingLeft(cornerQuadint); + cornerQuadint = siblingUp(cornerQuadint) + } + + const neighbors = []; + let traversalQuadint; + for (let j = -size; j <= size; j++) { + traversalQuadint = cornerQuadint; + for (let i = -size; i <= size; i++) { + 
neighbors.push({ + index: traversalQuadint.toString(), + distance: Math.max(Math.abs(i), Math.abs(j)) // Chebychev distance + }); + traversalQuadint = siblingRight(traversalQuadint); + } + cornerQuadint = siblingDown(cornerQuadint) + } + return neighbors.sort((a, b) => (a['distance'] > b['distance']) ? 1 : -1); +} + +/** + * get an array of quadints containing a geography for given zooms + * @param {object} poly geography we want to extract the quadints from + * @param {struct} limits struct containing the range of zooms + * @return {array} array of quadints containing a geography + */ +export function geojsonToQuadints (poly, limits) { + return tilecover.tiles(poly, limits).map(tile => quadintFromZXY(tile[2], tile[0], tile[1])); +} + +const clipNumber = (num, a, b) => Math.max(Math.min(num, Math.max(a, b)), Math.min(a, b)); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/src/quadkey/index.js b/clouds/bigquery/libraries/javascript/src/quadkey/index.js deleted file mode 100644 index 8ea5e5d7e..000000000 --- a/clouds/bigquery/libraries/javascript/src/quadkey/index.js +++ /dev/null @@ -1,310 +0,0 @@ -// ---------------------------- -// -- Copyright (C) 2021 CARTO -// ---------------------------- - -import tilebelt from '@mapbox/tilebelt'; -import tilecover from '@mapbox/tile-cover'; - -/** - * convert tile coordinates to quadint at specific zoom level - * @param {zxycoord} zxy zoom and tile coordinates - * @return {int} quadint for input tile coordinates at input zoom level - */ -export function quadintFromZXY (z, x, y) { - if (z < 0 || z > 29) { - throw new Error('Wrong zoom'); - } - const zI = z; - if (zI <= 13) { - let quadint = y; - quadint <<= zI; - quadint |= x; - quadint <<= 5; - quadint |= zI; - return quadint; - } - let quadint = BigInt(y); - quadint <<= BigInt(z); - quadint |= BigInt(x); - quadint <<= BigInt(5); - quadint |= BigInt(z); - return quadint; -} - -/** - * convert quadint to tile coordinates and level of zoom - * @param {int} quadint quadint to be converted - * @return {zxycoord} level of zoom and tile coordinates - */ -export function ZXYFromQuadint (quadint) { - const quadintBig = BigInt(quadint); - const z = quadintBig & BigInt(0x1F); - if (z <= 13n) { - const zNumber = Number(z); - const x = (quadint >> 5) & ((1 << zNumber) - 1); - const y = quadint >> (zNumber + 5); - return { z: zNumber, x: x, y: y }; - } - const x = (quadintBig >> (5n)) & ((1n << z) - 1n); - const y = quadintBig >> (5n + z); - return { z: Number(z), x: Number(x), y: Number(y) }; -} - -/** - * get quadint for location at specific zoom level - * @param {geocoord} location location coordinates to convert to quadint - * @param {number} zoom map zoom level of quadint to return - * @return {string} quadint the input location resides in for the input zoom level - */ -export function quadintFromLocation (long, lat, zoom) { - if (zoom < 0 || zoom > 29) { - throw new Error('Wrong zoom'); - } - lat = clipNumber(lat, -85.05, 85.05); - const tile = tilebelt.pointToTile(long, lat, zoom); - return quadintFromZXY(zoom, tile[0], tile[1]); -} - -/** - * convert quadkey into a quadint - * @param {string} quadkey quadkey to be converted - * @return {int} quadint - */ -export function quadintFromQuadkey (quadkey) { - const z = quadkey.length; - const tile = tilebelt.quadkeyToTile(quadkey); - return quadintFromZXY(z, tile[0], tile[1]); -} - -/** - * convert quadint into a quadkey - * @param {int} quadint quadint to be converted - * @return {string} quadkey - */ -export function 
quadkeyFromQuadint (quadint) { - const tile = ZXYFromQuadint(quadint); - return tilebelt.tileToQuadkey([tile.x, tile.y, tile.z]); -} - -/** - * get the bounding box for a quadint in location coordinates - * @param {int} quadint quadint to get bounding box from - * @return {bbox} bounding box for the input quadint - */ -export function bbox (quadint) { - const tile = ZXYFromQuadint(quadint); - return tilebelt.tileToBBOX([tile.x, tile.y, tile.z]); -} - -/** - * get the GeoJSON with the bounding box for a quadint in location coordinates - * @param {int} quadint quadint to get bounding box from - * @return {GeoJSON} GeoJSON with the bounding box for the input quadint - */ -export function quadintToGeoJSON (quadint) { - const tile = ZXYFromQuadint(quadint); - return tilebelt.tileToGeoJSON([tile.x, tile.y, tile.z]); -} - -/** - * returns the sibling of the given quadint and will wrap - * @param {int} quadint key to get sibling of - * @param {string} direction direction of sibling from key - * @return {int} sibling key - */ -export function sibling (quadint, direction) { - direction = direction.toLowerCase(); - if (direction !== 'left' && direction !== 'right' && direction !== 'up' && direction !== 'down') { - throw new Error('Wrong direction argument passed to sibling'); - } - if (direction === 'left') { - return siblingLeft(quadint); - } - if (direction === 'right') { - return siblingRight(quadint); - } - if (direction === 'up') { - return siblingUp(quadint); - } - if (direction === 'down') { - return siblingDown(quadint); - } -} - -/** - * returns the sibling of the given quadint and will wrap - * @param {int} quadint key to get sibling of - * @param {string} direction direction of sibling from key - * @return {int} sibling key - */ -export function siblingLeft (quadint) { - const tile = ZXYFromQuadint(quadint); - const tilesPerLevel = 2 << (tile.z - 1); - const x = tile.x > 0 ? tile.x - 1 : tilesPerLevel - 1; - return quadintFromZXY(tile.z, x, tile.y); -} - -/** - * returns the sibling of the given quadint and will wrap - * @param {int} quadint key to get sibling of - * @param {string} direction direction of sibling from key - * @return {int} sibling key - */ -export function siblingRight (quadint) { - const tile = ZXYFromQuadint(quadint); - const tilesPerLevel = 2 << (tile.z - 1); - const x = tile.x < tilesPerLevel - 1 ? tile.x + 1 : 0; - return quadintFromZXY(tile.z, x, tile.y); -} - -/** - * returns the sibling of the given quadint and will wrap - * @param {int} quadint key to get sibling of - * @param {string} direction direction of sibling from key - * @return {int} sibling key - */ -export function siblingUp (quadint) { - const tile = ZXYFromQuadint(quadint); - const tilesPerLevel = 2 << (tile.z - 1); - const y = tile.y > 0 ? tile.y - 1 : tilesPerLevel - 1; - return quadintFromZXY(tile.z, tile.x, y); -} - -/** - * returns the sibling of the given quadint and will wrap - * @param {int} quadint key to get sibling of - * @param {string} direction direction of sibling from key - * @return {int} sibling key - */ -export function siblingDown (quadint) { - const tile = ZXYFromQuadint(quadint); - const tilesPerLevel = 2 << (tile.z - 1); - const y = tile.y < tilesPerLevel - 1 ? 
tile.y + 1 : 0; - return quadintFromZXY(tile.z, tile.x, y); -} - -/** - * get all the children quadints of a quadint - * @param {int} quadint quadint to get the children of - * @param {int} resolution resolution of the desired children - * @return {array} array of quadints representing the children of the input quadint - */ -export function toChildren (quadint, resolution) { - const zxy = ZXYFromQuadint(quadint); - if (zxy.z < 0 || zxy.z > 28) { - throw new Error('Wrong quadint zoom'); - } - - if (resolution < 0 || resolution <= zxy.z) { - throw new Error('Wrong resolution'); - } - const diffZ = resolution - zxy.z; - const mask = (1 << diffZ) - 1; - const minTileX = zxy.x << diffZ; - const maxTileX = minTileX | mask; - const minTileY = zxy.y << diffZ; - const maxTileY = minTileY | mask; - const children = []; - let x, y; - for (x = minTileX; x <= maxTileX; x++) { - for (y = minTileY; y <= maxTileY; y++) { - children.push(quadintFromZXY(resolution, x, y)); - } - } - return children; -} - -/** - * get the parent of a quadint - * @param {int} quadint quadint to get the parent of - * @param {int} resolution resolution of the desired parent - * @return {int} parent of the input quadint - */ -export function toParent (quadint, resolution) { - const zxy = ZXYFromQuadint(quadint); - if (zxy.z < 1 || zxy.z > 29) { - throw new Error('Wrong quadint zoom'); - } - if (resolution < 0 || resolution >= zxy.z) { - throw new Error('Wrong resolution'); - } - return quadintFromZXY(resolution, zxy.x >> (zxy.z - resolution), zxy.y >> (zxy.z - resolution)); -} - -/** - * get the kring of a quadint - * @param {int} origin quadint to get the kring of - * @param {int} size in tiles of the desired kring - * @return {int} kring of the input quadint - */ -export function kRing (origin, size) { - if (size === 0) { - return [origin.toString()]; - } - - let i, j; - let cornerQuadint = origin; - // Traverse to top left corner - for (i = 0; i < size; i++) { - cornerQuadint = siblingLeft(cornerQuadint); - cornerQuadint = siblingUp(cornerQuadint) - } - - const neighbors = []; - let traversalQuadint; - for (j = 0; j < size * 2 + 1; j++) { - traversalQuadint = cornerQuadint; - for (i = 0; i < size * 2 + 1; i++) { - neighbors.push(traversalQuadint.toString()); - traversalQuadint = siblingRight(traversalQuadint); - } - cornerQuadint = siblingDown(cornerQuadint) - } - return neighbors; -} - -/** - * get the kring distances of a quadint - * @param {int} origin quadint to get the kring of - * @param {int} size in tiles of the desired kring - * @return {int} kring distances of the input quadint - */ -export function kRingDistances (origin, size) { - if (size === 0) { - return [{ index: origin.toString(), distance: 0 }]; - } - - let cornerQuadint = origin; - // Traverse to top left corner - for (let i = 0; i < size; i++) { - cornerQuadint = siblingLeft(cornerQuadint); - cornerQuadint = siblingUp(cornerQuadint) - } - - const neighbors = []; - let traversalQuadint; - for (let j = -size; j <= size; j++) { - traversalQuadint = cornerQuadint; - for (let i = -size; i <= size; i++) { - neighbors.push({ - index: traversalQuadint.toString(), - distance: Math.max(Math.abs(i), Math.abs(j)) // Chebychev distance - }); - traversalQuadint = siblingRight(traversalQuadint); - } - cornerQuadint = siblingDown(cornerQuadint) - } - return neighbors.sort((a, b) => (a['distance'] > b['distance']) ? 
1 : -1); -} - -/** - * get an array of quadints containing a geography for given zooms - * @param {object} poly geography we want to extract the quadints from - * @param {struct} limits struct containing the range of zooms - * @return {array} array of quadints containing a geography - */ -export function geojsonToQuadints (poly, limits) { - return tilecover.tiles(poly, limits).map(tile => quadintFromZXY(tile[2], tile[0], tile[1])); -} - -const clipNumber = (num, a, b) => Math.max(Math.min(num, Math.max(a, b)), Math.min(a, b)); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/src/random.js b/clouds/bigquery/libraries/javascript/src/random.js index fde5772e1..4326c343b 100644 --- a/clouds/bigquery/libraries/javascript/src/random.js +++ b/clouds/bigquery/libraries/javascript/src/random.js @@ -1,6 +1,6 @@ import { bbox, booleanPointInPolygon, randomPoint } from '@turf/turf'; -function generateRandomPointsInPolygon (polygon, numPoints) { +export function generateRandomPointsInPolygon (polygon, numPoints) { const randomPoints = []; while (randomPoints.length < numPoints) { const point = randomPoint(1, { bbox: bbox(polygon) }).features[0]; @@ -9,16 +9,4 @@ function generateRandomPointsInPolygon (polygon, numPoints) { } } return randomPoints; -} - -function generateRandomPointInPolygon (polygon) { - let point - do { - point = randomPoint(1, { bbox: bbox(polygon) }).features[0]; - } while (!booleanPointInPolygon(point, polygon)) - return JSON.stringify(point.geometry); -} - -export default { - generateRandomPointsInPolygon -}; \ No newline at end of file +} \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/src/s2.js b/clouds/bigquery/libraries/javascript/src/s2.js index a4590d8eb..3b28bd5cb 100644 --- a/clouds/bigquery/libraries/javascript/src/s2.js +++ b/clouds/bigquery/libraries/javascript/src/s2.js @@ -1,9 +1,596 @@ -import { S2 } from './s2/index'; - -export default { - keyToId: S2.keyToId, - idToKey: S2.idToKey, - latLngToKey: S2.latLngToKey, - FromHilbertQuadKey: S2.S2Cell.FromHilbertQuadKey, - idToLatLng: S2.idToLatLng -}; \ No newline at end of file +/* eslint-disable */ +/// S2 Geometry functions +// the regional scoreboard is based on a level 6 S2 Cell +// - https://docs.google.com/presentation/d/1Hl4KapfAENAOf4gv-pSngKwvS_jwNVHRPZTTDzXXn6Q/view?pli=1#slide=id.i22 +// at the time of writing there's no actual API for the intel map to retrieve scoreboard data, +// but it's still useful to plot the score cells on the intel map + + +// the S2 geometry is based on projecting the earth sphere onto a cube, with some scaling of face coordinates to +// keep things close to approximate equal area for adjacent cells +// to convert a lat,lng into a cell id: +// - convert lat,lng to x,y,z +// - convert x,y,z into face,u,v +// - u,v scaled to s,t with quadratic formula +// - s,t converted to integer i,j offsets +// - i,j converted to a position along a Hubbert space-filling curve +// - combine face,position to get the cell id + +//NOTE: compared to the google S2 geometry library, we vary from their code in the following ways +// - cell IDs: they combine face and the hilbert curve position into a single 64 bit number. this gives efficient space +// and speed. javascript doesn't have appropriate data types, and speed is not cricical, so we use +// as [face,[bitpair,bitpair,...]] instead +// - i,j: they always use 30 bits, adjusting as needed. 
we use 0 to (1< temp[1]) { + if (temp[0] > temp[2]) { + return 0; + } else { + return 2; + } + } else { + if (temp[1] > temp[2]) { + return 1; + } else { + return 2; + } + } + +}; + +var faceXYZToUV = function(face,xyz) { + var u,v; + + switch (face) { + case 0: u = xyz[1]/xyz[0]; v = xyz[2]/xyz[0]; break; + case 1: u = -xyz[0]/xyz[1]; v = xyz[2]/xyz[1]; break; + case 2: u = -xyz[0]/xyz[2]; v = -xyz[1]/xyz[2]; break; + case 3: u = xyz[2]/xyz[0]; v = xyz[1]/xyz[0]; break; + case 4: u = xyz[2]/xyz[1]; v = -xyz[0]/xyz[1]; break; + case 5: u = -xyz[1]/xyz[2]; v = -xyz[0]/xyz[2]; break; + default: throw {error: 'Invalid face'}; + } + + return [u,v]; +}; + + + + +S2.XYZToFaceUV = function(xyz) { + var face = largestAbsComponent(xyz); + + if (xyz[face] < 0) { + face += 3; + } + + var uv = faceXYZToUV (face,xyz); + + return [face, uv]; +}; + +S2.FaceUVToXYZ = function(face,uv) { + var u = uv[0]; + var v = uv[1]; + + switch (face) { + case 0: return [ 1, u, v]; + case 1: return [-u, 1, v]; + case 2: return [-u,-v, 1]; + case 3: return [-1,-v,-u]; + case 4: return [ v,-1,-u]; + case 5: return [ v, u,-1]; + default: throw {error: 'Invalid face'}; + } +}; + +var singleSTtoUV = function(st) { + if (st >= 0.5) { + return (1/3.0) * (4*st*st - 1); + } else { + return (1/3.0) * (1 - (4*(1-st)*(1-st))); + } +}; + +S2.STToUV = function(st) { + return [singleSTtoUV(st[0]), singleSTtoUV(st[1])]; +}; + + +var singleUVtoST = function(uv) { + if (uv >= 0) { + return 0.5 * Math.sqrt (1 + 3*uv); + } else { + return 1 - 0.5 * Math.sqrt (1 - 3*uv); + } +}; +S2.UVToST = function(uv) { + return [singleUVtoST(uv[0]), singleUVtoST(uv[1])]; +}; + + +S2.STToIJ = function(st,order) { + var maxSize = (1<=0; i--) { + + var mask = 1<= 0; i--) { + + level = maxLevel - i; + bit = position[i]; + rx = 0; + ry = 0; + if (bit === '1') { + ry = 1; + } + else if (bit === '2') { + rx = 1; + ry = 1; + } + else if (bit === '3') { + rx = 1; + } + + val = Math.pow(2, level - 1); + rotateAndFlipQuadrant(val, point, rx, ry); + + point.x += val * rx; + point.y += val * ry; + + } + + if (face % 2 === 1) { + var t = point.x; + point.x = point.y; + point.y = t; + } + + + return S2.S2Cell.FromFaceIJ(parseInt(face), [point.x, point.y], level); +}; + +//static method to construct +S2.S2Cell.FromLatLng = function(latLng, level) { + if ((!latLng.lat && latLng.lat !== 0) || (!latLng.lng && latLng.lng !== 0)) { + throw new Error("Pass { lat: lat, lng: lng } to S2.S2Cell.FromLatLng"); + } + var xyz = S2.LatLngToXYZ(latLng); + + var faceuv = S2.XYZToFaceUV(xyz); + var st = S2.UVToST(faceuv[1]); + + var ij = S2.STToIJ(st,level); + + return S2.S2Cell.FromFaceIJ (faceuv[0], ij, level); +}; + +/* +S2.faceIjLevelToXyz = function (face, ij, level) { + var st = S2.IJToST(ij, level, [0.5, 0.5]); + var uv = S2.STToUV(st); + var xyz = S2.FaceUVToXYZ(face, uv); + + return S2.XYZToLatLng(xyz); + return xyz; +}; +*/ + +S2.S2Cell.FromFaceIJ = function(face,ij,level) { + var cell = new S2.S2Cell(); + cell.face = face; + cell.ij = ij; + cell.level = level; + + return cell; +}; + + +S2.S2Cell.prototype.toString = function() { + return 'F'+this.face+'ij['+this.ij[0]+','+this.ij[1]+']@'+this.level; +}; + +S2.S2Cell.prototype.getLatLng = function() { + var st = S2.IJToST(this.ij,this.level, [0.5,0.5]); + var uv = S2.STToUV(st); + var xyz = S2.FaceUVToXYZ(this.face, uv); + + return S2.XYZToLatLng(xyz); +}; + +S2.S2Cell.prototype.getCornerLatLngs = function() { + var result = []; + var offsets = [ + [ 0.0, 0.0 ], + [ 0.0, 1.0 ], + [ 1.0, 1.0 ], + [ 1.0, 0.0 ] + ]; + + for 
(var i=0; i<4; i++) { + var st = S2.IJToST(this.ij, this.level, offsets[i]); + var uv = S2.STToUV(st); + var xyz = S2.FaceUVToXYZ(this.face, uv); + + result.push ( S2.XYZToLatLng(xyz) ); + } + return result; +}; + + +S2.S2Cell.prototype.getFaceAndQuads = function () { + var quads = pointToHilbertQuadList(this.ij[0], this.ij[1], this.level, this.face); + + return [this.face,quads]; +}; +S2.S2Cell.prototype.toHilbertQuadkey = function () { + var quads = pointToHilbertQuadList(this.ij[0], this.ij[1], this.level, this.face); + + return this.face.toString(10) + '/' + quads.join(''); +}; + +S2.latLngToNeighborKeys = S2.S2Cell.latLngToNeighborKeys = function (lat, lng, level) { + return S2.S2Cell.FromLatLng({ lat: lat, lng: lng }, level).getNeighbors().map(function (cell) { + return cell.toHilbertQuadkey(); + }); +}; +S2.S2Cell.prototype.getNeighbors = function() { + + var fromFaceIJWrap = function(face,ij,level) { + var maxSize = (1<=0 && ij[1]>=0 && ij[0] levelN) { + posS = posS.substr(0, levelN); + } + + // 3-bit face value + faceB = Long.fromString(faceN.toString(10), true, 10).toString(2); + while (faceB.length < S2.FACE_BITS) { + faceB = '0' + faceB; + } + + // 60-bit position value + posB = Long.fromString(posS, true, 4).toString(2); + while (posB.length < (2 * levelN)) { + posB = '0' + posB; + } + + bin = faceB + posB; + // 1-bit lsb marker + bin += '1'; + // n-bit padding to 64-bits + while (bin.length < (S2.FACE_BITS + S2.POS_BITS)) { + bin += '0'; + } + + return Long.fromString(bin, true, 2).toSigned().toString(10); +}; + +S2.keyToId = S2.S2Cell.keyToId += S2.toId = S2.toCellId = S2.fromKey += function (key) { + var parts = key.split('/'); + + return S2.fromFacePosLevel(parts[0], parts[1], parts[1].length); +}; + +S2.idToKey = S2.S2Cell.idToKey += S2.S2Cell.toKey = S2.toKey += S2.fromId = S2.fromCellId += S2.S2Cell.toHilbertQuadkey = S2.toHilbertQuadkey += function (idS) { + var Long = exports.dcodeIO && exports.dcodeIO.Long || require('long'); + var bin = Long.fromString(idS, true, 10).toString(2); + + while (bin.length < (S2.FACE_BITS + S2.POS_BITS)) { + bin = '0' + bin; + } + + // MUST come AFTER binstr has been left-padded with '0's + var lsbIndex = bin.lastIndexOf('1'); + // substr(start, len) + // substring(start, end) // includes start, does not include end + var faceB = bin.substring(0, 3); + // posB will always be a multiple of 2 (or it's invalid) + var posB = bin.substring(3, lsbIndex); + var levelN = posB.length / 2; + + var faceS = Long.fromString(faceB, true, 2).toString(10); + var posS = Long.fromString(posB, true, 2).toString(4); + + while (posS.length < levelN) { + posS = '0' + posS; + } + + return faceS + '/' + posS; +}; + +S2.keyToLatLng = S2.S2Cell.keyToLatLng = function (key) { + var cell2 = S2.S2Cell.FromHilbertQuadKey(key); + return cell2.getLatLng(); +}; + +S2.idToLatLng = S2.S2Cell.idToLatLng = function (id) { + var key = S2.idToKey(id); + return S2.keyToLatLng(key); +}; + +S2.S2Cell.latLngToKey = S2.latLngToKey += S2.latLngToQuadkey = function (lat, lng, level) { + if (isNaN(level) || level < 1 || level > 30) { + throw new Error("'level' is not a number between 1 and 30 (but it should be)"); + } + // TODO + // + // S2.idToLatLng(id) + // S2.keyToLatLng(key) + // S2.nextFace(key) // prevent wrapping on nextKey + // S2.prevFace(key) // prevent wrapping on prevKey + // + // .toKeyArray(id) // face,quadtree + // .toKey(id) // hilbert + // .toPoint(id) // ij + // .toId(key) // uint64 (as string) + // .toLong(key) // long.js + // .toLatLng(id) // object? 
or array?, or string (with comma)? + // + // maybe S2.HQ.x, S2.GPS.x, S2.CI.x? + return S2.S2Cell.FromLatLng({ lat: lat, lng: lng }, level).toHilbertQuadkey(); +}; + +S2.stepKey = function (key, num) { + var Long = exports.dcodeIO && exports.dcodeIO.Long || require('long'); + var parts = key.split('/'); + + var faceS = parts[0]; + var posS = parts[1]; + var level = parts[1].length; + + var posL = Long.fromString(posS, true, 4); + // TODO handle wrapping (0 === pos + 1) + // (only on the 12 edges of the globe) + var otherL; + if (num > 0) { + otherL = posL.add(Math.abs(num)); + } + else if (num < 0) { + otherL = posL.subtract(Math.abs(num)); + } + var otherS = otherL.toString(4); + + if ('0' === otherS) { + console.warning(new Error("face/position wrapping is not yet supported")); + } + + while (otherS.length < level) { + otherS = '0' + otherS; + } + + return faceS + '/' + otherS; +}; + +S2.S2Cell.prevKey = S2.prevKey = function (key) { + return S2.stepKey(key, -1); +}; + +S2.S2Cell.nextKey = S2.nextKey = function (key) { + return S2.stepKey(key, 1); +}; + +})('undefined' !== typeof module ? module.exports : window); +/* eslint-enable */ \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/src/s2/index.js b/clouds/bigquery/libraries/javascript/src/s2/index.js deleted file mode 100644 index 3b28bd5cb..000000000 --- a/clouds/bigquery/libraries/javascript/src/s2/index.js +++ /dev/null @@ -1,596 +0,0 @@ -/* eslint-disable */ -/// S2 Geometry functions -// the regional scoreboard is based on a level 6 S2 Cell -// - https://docs.google.com/presentation/d/1Hl4KapfAENAOf4gv-pSngKwvS_jwNVHRPZTTDzXXn6Q/view?pli=1#slide=id.i22 -// at the time of writing there's no actual API for the intel map to retrieve scoreboard data, -// but it's still useful to plot the score cells on the intel map - - -// the S2 geometry is based on projecting the earth sphere onto a cube, with some scaling of face coordinates to -// keep things close to approximate equal area for adjacent cells -// to convert a lat,lng into a cell id: -// - convert lat,lng to x,y,z -// - convert x,y,z into face,u,v -// - u,v scaled to s,t with quadratic formula -// - s,t converted to integer i,j offsets -// - i,j converted to a position along a Hubbert space-filling curve -// - combine face,position to get the cell id - -//NOTE: compared to the google S2 geometry library, we vary from their code in the following ways -// - cell IDs: they combine face and the hilbert curve position into a single 64 bit number. this gives efficient space -// and speed. javascript doesn't have appropriate data types, and speed is not cricical, so we use -// as [face,[bitpair,bitpair,...]] instead -// - i,j: they always use 30 bits, adjusting as needed. 
we use 0 to (1< temp[1]) { - if (temp[0] > temp[2]) { - return 0; - } else { - return 2; - } - } else { - if (temp[1] > temp[2]) { - return 1; - } else { - return 2; - } - } - -}; - -var faceXYZToUV = function(face,xyz) { - var u,v; - - switch (face) { - case 0: u = xyz[1]/xyz[0]; v = xyz[2]/xyz[0]; break; - case 1: u = -xyz[0]/xyz[1]; v = xyz[2]/xyz[1]; break; - case 2: u = -xyz[0]/xyz[2]; v = -xyz[1]/xyz[2]; break; - case 3: u = xyz[2]/xyz[0]; v = xyz[1]/xyz[0]; break; - case 4: u = xyz[2]/xyz[1]; v = -xyz[0]/xyz[1]; break; - case 5: u = -xyz[1]/xyz[2]; v = -xyz[0]/xyz[2]; break; - default: throw {error: 'Invalid face'}; - } - - return [u,v]; -}; - - - - -S2.XYZToFaceUV = function(xyz) { - var face = largestAbsComponent(xyz); - - if (xyz[face] < 0) { - face += 3; - } - - var uv = faceXYZToUV (face,xyz); - - return [face, uv]; -}; - -S2.FaceUVToXYZ = function(face,uv) { - var u = uv[0]; - var v = uv[1]; - - switch (face) { - case 0: return [ 1, u, v]; - case 1: return [-u, 1, v]; - case 2: return [-u,-v, 1]; - case 3: return [-1,-v,-u]; - case 4: return [ v,-1,-u]; - case 5: return [ v, u,-1]; - default: throw {error: 'Invalid face'}; - } -}; - -var singleSTtoUV = function(st) { - if (st >= 0.5) { - return (1/3.0) * (4*st*st - 1); - } else { - return (1/3.0) * (1 - (4*(1-st)*(1-st))); - } -}; - -S2.STToUV = function(st) { - return [singleSTtoUV(st[0]), singleSTtoUV(st[1])]; -}; - - -var singleUVtoST = function(uv) { - if (uv >= 0) { - return 0.5 * Math.sqrt (1 + 3*uv); - } else { - return 1 - 0.5 * Math.sqrt (1 - 3*uv); - } -}; -S2.UVToST = function(uv) { - return [singleUVtoST(uv[0]), singleUVtoST(uv[1])]; -}; - - -S2.STToIJ = function(st,order) { - var maxSize = (1<=0; i--) { - - var mask = 1<= 0; i--) { - - level = maxLevel - i; - bit = position[i]; - rx = 0; - ry = 0; - if (bit === '1') { - ry = 1; - } - else if (bit === '2') { - rx = 1; - ry = 1; - } - else if (bit === '3') { - rx = 1; - } - - val = Math.pow(2, level - 1); - rotateAndFlipQuadrant(val, point, rx, ry); - - point.x += val * rx; - point.y += val * ry; - - } - - if (face % 2 === 1) { - var t = point.x; - point.x = point.y; - point.y = t; - } - - - return S2.S2Cell.FromFaceIJ(parseInt(face), [point.x, point.y], level); -}; - -//static method to construct -S2.S2Cell.FromLatLng = function(latLng, level) { - if ((!latLng.lat && latLng.lat !== 0) || (!latLng.lng && latLng.lng !== 0)) { - throw new Error("Pass { lat: lat, lng: lng } to S2.S2Cell.FromLatLng"); - } - var xyz = S2.LatLngToXYZ(latLng); - - var faceuv = S2.XYZToFaceUV(xyz); - var st = S2.UVToST(faceuv[1]); - - var ij = S2.STToIJ(st,level); - - return S2.S2Cell.FromFaceIJ (faceuv[0], ij, level); -}; - -/* -S2.faceIjLevelToXyz = function (face, ij, level) { - var st = S2.IJToST(ij, level, [0.5, 0.5]); - var uv = S2.STToUV(st); - var xyz = S2.FaceUVToXYZ(face, uv); - - return S2.XYZToLatLng(xyz); - return xyz; -}; -*/ - -S2.S2Cell.FromFaceIJ = function(face,ij,level) { - var cell = new S2.S2Cell(); - cell.face = face; - cell.ij = ij; - cell.level = level; - - return cell; -}; - - -S2.S2Cell.prototype.toString = function() { - return 'F'+this.face+'ij['+this.ij[0]+','+this.ij[1]+']@'+this.level; -}; - -S2.S2Cell.prototype.getLatLng = function() { - var st = S2.IJToST(this.ij,this.level, [0.5,0.5]); - var uv = S2.STToUV(st); - var xyz = S2.FaceUVToXYZ(this.face, uv); - - return S2.XYZToLatLng(xyz); -}; - -S2.S2Cell.prototype.getCornerLatLngs = function() { - var result = []; - var offsets = [ - [ 0.0, 0.0 ], - [ 0.0, 1.0 ], - [ 1.0, 1.0 ], - [ 1.0, 0.0 ] - ]; - - for 
(var i=0; i<4; i++) { - var st = S2.IJToST(this.ij, this.level, offsets[i]); - var uv = S2.STToUV(st); - var xyz = S2.FaceUVToXYZ(this.face, uv); - - result.push ( S2.XYZToLatLng(xyz) ); - } - return result; -}; - - -S2.S2Cell.prototype.getFaceAndQuads = function () { - var quads = pointToHilbertQuadList(this.ij[0], this.ij[1], this.level, this.face); - - return [this.face,quads]; -}; -S2.S2Cell.prototype.toHilbertQuadkey = function () { - var quads = pointToHilbertQuadList(this.ij[0], this.ij[1], this.level, this.face); - - return this.face.toString(10) + '/' + quads.join(''); -}; - -S2.latLngToNeighborKeys = S2.S2Cell.latLngToNeighborKeys = function (lat, lng, level) { - return S2.S2Cell.FromLatLng({ lat: lat, lng: lng }, level).getNeighbors().map(function (cell) { - return cell.toHilbertQuadkey(); - }); -}; -S2.S2Cell.prototype.getNeighbors = function() { - - var fromFaceIJWrap = function(face,ij,level) { - var maxSize = (1<=0 && ij[1]>=0 && ij[0] levelN) { - posS = posS.substr(0, levelN); - } - - // 3-bit face value - faceB = Long.fromString(faceN.toString(10), true, 10).toString(2); - while (faceB.length < S2.FACE_BITS) { - faceB = '0' + faceB; - } - - // 60-bit position value - posB = Long.fromString(posS, true, 4).toString(2); - while (posB.length < (2 * levelN)) { - posB = '0' + posB; - } - - bin = faceB + posB; - // 1-bit lsb marker - bin += '1'; - // n-bit padding to 64-bits - while (bin.length < (S2.FACE_BITS + S2.POS_BITS)) { - bin += '0'; - } - - return Long.fromString(bin, true, 2).toSigned().toString(10); -}; - -S2.keyToId = S2.S2Cell.keyToId -= S2.toId = S2.toCellId = S2.fromKey -= function (key) { - var parts = key.split('/'); - - return S2.fromFacePosLevel(parts[0], parts[1], parts[1].length); -}; - -S2.idToKey = S2.S2Cell.idToKey -= S2.S2Cell.toKey = S2.toKey -= S2.fromId = S2.fromCellId -= S2.S2Cell.toHilbertQuadkey = S2.toHilbertQuadkey -= function (idS) { - var Long = exports.dcodeIO && exports.dcodeIO.Long || require('long'); - var bin = Long.fromString(idS, true, 10).toString(2); - - while (bin.length < (S2.FACE_BITS + S2.POS_BITS)) { - bin = '0' + bin; - } - - // MUST come AFTER binstr has been left-padded with '0's - var lsbIndex = bin.lastIndexOf('1'); - // substr(start, len) - // substring(start, end) // includes start, does not include end - var faceB = bin.substring(0, 3); - // posB will always be a multiple of 2 (or it's invalid) - var posB = bin.substring(3, lsbIndex); - var levelN = posB.length / 2; - - var faceS = Long.fromString(faceB, true, 2).toString(10); - var posS = Long.fromString(posB, true, 2).toString(4); - - while (posS.length < levelN) { - posS = '0' + posS; - } - - return faceS + '/' + posS; -}; - -S2.keyToLatLng = S2.S2Cell.keyToLatLng = function (key) { - var cell2 = S2.S2Cell.FromHilbertQuadKey(key); - return cell2.getLatLng(); -}; - -S2.idToLatLng = S2.S2Cell.idToLatLng = function (id) { - var key = S2.idToKey(id); - return S2.keyToLatLng(key); -}; - -S2.S2Cell.latLngToKey = S2.latLngToKey -= S2.latLngToQuadkey = function (lat, lng, level) { - if (isNaN(level) || level < 1 || level > 30) { - throw new Error("'level' is not a number between 1 and 30 (but it should be)"); - } - // TODO - // - // S2.idToLatLng(id) - // S2.keyToLatLng(key) - // S2.nextFace(key) // prevent wrapping on nextKey - // S2.prevFace(key) // prevent wrapping on prevKey - // - // .toKeyArray(id) // face,quadtree - // .toKey(id) // hilbert - // .toPoint(id) // ij - // .toId(key) // uint64 (as string) - // .toLong(key) // long.js - // .toLatLng(id) // object? 
or array?, or string (with comma)? - // - // maybe S2.HQ.x, S2.GPS.x, S2.CI.x? - return S2.S2Cell.FromLatLng({ lat: lat, lng: lng }, level).toHilbertQuadkey(); -}; - -S2.stepKey = function (key, num) { - var Long = exports.dcodeIO && exports.dcodeIO.Long || require('long'); - var parts = key.split('/'); - - var faceS = parts[0]; - var posS = parts[1]; - var level = parts[1].length; - - var posL = Long.fromString(posS, true, 4); - // TODO handle wrapping (0 === pos + 1) - // (only on the 12 edges of the globe) - var otherL; - if (num > 0) { - otherL = posL.add(Math.abs(num)); - } - else if (num < 0) { - otherL = posL.subtract(Math.abs(num)); - } - var otherS = otherL.toString(4); - - if ('0' === otherS) { - console.warning(new Error("face/position wrapping is not yet supported")); - } - - while (otherS.length < level) { - otherS = '0' + otherS; - } - - return faceS + '/' + otherS; -}; - -S2.S2Cell.prevKey = S2.prevKey = function (key) { - return S2.stepKey(key, -1); -}; - -S2.S2Cell.nextKey = S2.nextKey = function (key) { - return S2.stepKey(key, 1); -}; - -})('undefined' !== typeof module ? module.exports : window); -/* eslint-enable */ \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/accessors.test.js b/clouds/bigquery/libraries/javascript/test/accessors.test.js new file mode 100644 index 000000000..c233f5ac3 --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/accessors.test.js @@ -0,0 +1,7 @@ +const accessorsLib = require('../build/accessors'); + +test('accessors library defined', () => { + expect(accessorsLib.featureCollection).toBeDefined(); + expect(accessorsLib.feature).toBeDefined(); + expect(accessorsLib.envelope).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/clustering.test.js b/clouds/bigquery/libraries/javascript/test/clustering.test.js index 0441d1608..4a2166d60 100644 --- a/clouds/bigquery/libraries/javascript/test/clustering.test.js +++ b/clouds/bigquery/libraries/javascript/test/clustering.test.js @@ -1,8 +1,8 @@ -const lib = require('../build/index'); +const clusteringLib = require('../build/clustering'); test('clustering library defined', () => { - expect(lib.clustering.featureCollection).toBeDefined(); - expect(lib.clustering.feature).toBeDefined(); - expect(lib.clustering.clustersKmeans).toBeDefined(); - expect(lib.clustering.prioritizeDistinctSort).toBeDefined(); + expect(clusteringLib.featureCollection).toBeDefined(); + expect(clusteringLib.feature).toBeDefined(); + expect(clusteringLib.clustersKmeans).toBeDefined(); + expect(clusteringLib.prioritizeDistinctSort).toBeDefined(); }); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/constructors.test.js b/clouds/bigquery/libraries/javascript/test/constructors.test.js new file mode 100644 index 000000000..3efd5d989 --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/constructors.test.js @@ -0,0 +1,6 @@ +const constructorsLib = require('../build/constructors'); + +test('constructors library defined', () => { + expect(constructorsLib.bezierSpline).toBeDefined(); + expect(constructorsLib.ellipse).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/h3.test.js b/clouds/bigquery/libraries/javascript/test/h3.test.js new file mode 100644 index 000000000..80d1aedf3 --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/h3.test.js @@ -0,0 +1,17 @@ +const h3Lib = require('../build/h3'); + +test('h3 library defined', () => { + 
expect(h3Lib.geoToH3).toBeDefined(); + expect(h3Lib.compact).toBeDefined(); + expect(h3Lib.h3Distance).toBeDefined(); + expect(h3Lib.h3IsValid).toBeDefined(); + expect(h3Lib.hexRing).toBeDefined(); + expect(h3Lib.h3IsPentagon).toBeDefined(); + expect(h3Lib.kRing).toBeDefined(); + expect(h3Lib.kRingDistances).toBeDefined(); + expect(h3Lib.polyfill).toBeDefined(); + expect(h3Lib.h3ToGeoBoundary).toBeDefined(); + expect(h3Lib.h3ToChildren).toBeDefined(); + expect(h3Lib.h3ToParent).toBeDefined(); + expect(h3Lib.uncompact).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/index.test.js b/clouds/bigquery/libraries/javascript/test/index.test.js deleted file mode 100644 index 23d66323c..000000000 --- a/clouds/bigquery/libraries/javascript/test/index.test.js +++ /dev/null @@ -1,81 +0,0 @@ -const lib = require('../build/index'); - -test('accessors library defined', () => { - expect(lib.accessors.featureCollection).toBeDefined(); - expect(lib.accessors.feature).toBeDefined(); - expect(lib.accessors.envelope).toBeDefined(); -}); - -test('constructors library defined', () => { - expect(lib.constructors.bezierSpline).toBeDefined(); - expect(lib.constructors.ellipse).toBeDefined(); -}); - -test('h3 library defined', () => { - expect(lib.h3.geoToH3).toBeDefined(); - expect(lib.h3.compact).toBeDefined(); - expect(lib.h3.h3Distance).toBeDefined(); - expect(lib.h3.h3IsValid).toBeDefined(); - expect(lib.h3.hexRing).toBeDefined(); - expect(lib.h3.h3IsPentagon).toBeDefined(); - expect(lib.h3.kRing).toBeDefined(); - expect(lib.h3.kRingDistances).toBeDefined(); - expect(lib.h3.polyfill).toBeDefined(); - expect(lib.h3.h3ToGeoBoundary).toBeDefined(); - expect(lib.h3.h3ToChildren).toBeDefined(); - expect(lib.h3.h3ToParent).toBeDefined(); - expect(lib.h3.uncompact).toBeDefined(); -}); - -test('measurements library defined', () => { - expect(lib.measurements.angle).toBeDefined(); - expect(lib.measurements.bearing).toBeDefined(); - expect(lib.measurements.featureCollection).toBeDefined(); - expect(lib.measurements.feature).toBeDefined(); - expect(lib.measurements.distanceWeight).toBeDefined(); -}); - -test('placekey library defined', () => { - expect(lib.placekey.placekeyIsValid).toBeDefined(); - expect(lib.placekey.h3ToPlacekey).toBeDefined(); - expect(lib.placekey.placekeyToH3).toBeDefined(); -}); - -test('processing library defined', () => { - expect(lib.processing.featureCollection).toBeDefined(); - expect(lib.processing.feature).toBeDefined(); - expect(lib.processing.voronoi).toBeDefined(); - expect(lib.processing.polygonToLine).toBeDefined(); -}); - -test('quadkey library defined', () => { - expect(lib.quadkey.bbox).toBeDefined(); - expect(lib.quadkey.toChildren).toBeDefined(); - expect(lib.quadkey.quadkeyFromQuadint).toBeDefined(); - expect(lib.quadkey.quadintFromQuadkey).toBeDefined(); - expect(lib.quadkey.quadintFromLocation).toBeDefined(); - expect(lib.quadkey.quadintToGeoJSON).toBeDefined(); - expect(lib.quadkey.geojsonToQuadints).toBeDefined(); - expect(lib.quadkey.ZXYFromQuadint).toBeDefined(); -}); - -test('s2 library defined', () => { - expect(lib.s2.keyToId).toBeDefined(); - expect(lib.s2.idToKey).toBeDefined(); - expect(lib.s2.latLngToKey).toBeDefined(); - expect(lib.s2.FromHilbertQuadKey).toBeDefined(); - expect(lib.s2.idToLatLng).toBeDefined(); -}); - -test('transformations library defined', () => { - expect(lib.transformations.featureCollection).toBeDefined(); - expect(lib.transformations.feature).toBeDefined(); - 
expect(lib.transformations.buffer).toBeDefined(); - expect(lib.transformations.centerMean).toBeDefined(); - expect(lib.transformations.centerMedian).toBeDefined(); - expect(lib.transformations.centerOfMass).toBeDefined(); - expect(lib.transformations.concave).toBeDefined(); - expect(lib.transformations.destination).toBeDefined(); - expect(lib.transformations.greatCircle).toBeDefined(); - expect(lib.transformations.along).toBeDefined(); -}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/measurements.test.js b/clouds/bigquery/libraries/javascript/test/measurements.test.js new file mode 100644 index 000000000..e626382da --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/measurements.test.js @@ -0,0 +1,9 @@ +const measurementsLib = require('../build/measurements'); + +test('measurements library defined', () => { + expect(measurementsLib.angle).toBeDefined(); + expect(measurementsLib.bearing).toBeDefined(); + expect(measurementsLib.featureCollection).toBeDefined(); + expect(measurementsLib.feature).toBeDefined(); + expect(measurementsLib.distanceWeight).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/placekey.test.js b/clouds/bigquery/libraries/javascript/test/placekey.test.js new file mode 100644 index 000000000..11b71f57d --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/placekey.test.js @@ -0,0 +1,7 @@ +const placekeyLib = require('../build/placekey'); + +test('placekey library defined', () => { + expect(placekeyLib.placekeyIsValid).toBeDefined(); + expect(placekeyLib.h3ToPlacekey).toBeDefined(); + expect(placekeyLib.placekeyToH3).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/processing.test.js b/clouds/bigquery/libraries/javascript/test/processing.test.js new file mode 100644 index 000000000..07c5eb9ea --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/processing.test.js @@ -0,0 +1,8 @@ +const processingLib = require('../build/processing'); + +test('processing library defined', () => { + expect(processingLib.featureCollection).toBeDefined(); + expect(processingLib.feature).toBeDefined(); + expect(processingLib.voronoi).toBeDefined(); + expect(processingLib.polygonToLine).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/quadkey.test.js b/clouds/bigquery/libraries/javascript/test/quadkey.test.js index 942731ce3..a97ab67df 100644 --- a/clouds/bigquery/libraries/javascript/test/quadkey.test.js +++ b/clouds/bigquery/libraries/javascript/test/quadkey.test.js @@ -1,12 +1,23 @@ -const lib = require('../build/index'); +const quadkeyLib = require('../build/quadkey'); // TODO: refactor tests +test('quadkey library defined', () => { + expect(quadkeyLib.bbox).toBeDefined(); + expect(quadkeyLib.toChildren).toBeDefined(); + expect(quadkeyLib.quadkeyFromQuadint).toBeDefined(); + expect(quadkeyLib.quadintFromQuadkey).toBeDefined(); + expect(quadkeyLib.quadintFromLocation).toBeDefined(); + expect(quadkeyLib.quadintToGeoJSON).toBeDefined(); + expect(quadkeyLib.geojsonToQuadints).toBeDefined(); + expect(quadkeyLib.ZXYFromQuadint).toBeDefined(); +}); + test('bbox should work', () => { - expect(lib.quadkey.bbox(162)).toEqual([-90, 0, 0, 66.51326044311186]); - expect(lib.quadkey.bbox(12070922)).toEqual([-45, 44.840290651397986, -44.6484375, 45.08903556483103]); - expect(lib.quadkey.bbox(791040491538)).toEqual([-45, 44.99976701918129, -44.998626708984375, 45.00073807829068]); - 
expect(lib.quadkey.bbox(12960460429066265n)).toEqual([-45, 44.999994612636684, -44.99998927116394, 45.00000219906962]); + expect(quadkeyLib.bbox(162)).toEqual([-90, 0, 0, 66.51326044311186]); + expect(quadkeyLib.bbox(12070922)).toEqual([-45, 44.840290651397986, -44.6484375, 45.08903556483103]); + expect(quadkeyLib.bbox(791040491538)).toEqual([-45, 44.99976701918129, -44.998626708984375, 45.00073807829068]); + expect(quadkeyLib.bbox(12960460429066265n)).toEqual([-45, 44.999994612636684, -44.99998927116394, 45.00000219906962]); }); test('toParent should work at any level of zoom', () => { @@ -14,27 +25,27 @@ test('toParent should work at any level of zoom', () => { for (z = 1; z < 30; ++z) { for (lat = -90; lat <= 90; lat = lat + 15) { for (lng = -180; lng <= 180; lng = lng + 15) { - const quadint = lib.quadkey.quadintFromLocation(lng, lat, z); - const currentParent = lib.quadkey.quadintFromLocation(lng, lat, z - 1); - expect(currentParent).toEqual(lib.quadkey.toParent(quadint, z - 1)); + const quadint = quadkeyLib.quadintFromLocation(lng, lat, z); + const currentParent = quadkeyLib.quadintFromLocation(lng, lat, z - 1); + expect(currentParent).toEqual(quadkeyLib.toParent(quadint, z - 1)); } } } for (z = 5; z < 30; ++z) { for (lat = -90; lat <= 90; lat = lat + 15) { for (lng = -180; lng <= 180; lng = lng + 15) { - const quadint = lib.quadkey.quadintFromLocation(lng, lat, z); - const currentParent = lib.quadkey.quadintFromLocation(lng, lat, z - 5); - expect(currentParent).toEqual(lib.quadkey.toParent(quadint, z - 5)); + const quadint = quadkeyLib.quadintFromLocation(lng, lat, z); + const currentParent = quadkeyLib.quadintFromLocation(lng, lat, z - 5); + expect(currentParent).toEqual(quadkeyLib.toParent(quadint, z - 5)); } } } for (z = 10; z < 30; ++z) { for (lat = -90; lat <= 90; lat = lat + 15) { for (lng = -180; lng <= 180; lng = lng + 15) { - const quadint = lib.quadkey.quadintFromLocation(lng, lat, z); - const currentParent = lib.quadkey.quadintFromLocation(lng, lat, z - 10); - expect(currentParent).toEqual(lib.quadkey.toParent(quadint, z - 10)); + const quadint = quadkeyLib.quadintFromLocation(lng, lat, z); + const currentParent = quadkeyLib.quadintFromLocation(lng, lat, z - 10); + expect(currentParent).toEqual(quadkeyLib.toParent(quadint, z - 10)); } } } @@ -45,10 +56,10 @@ test('toChildren should work at any level of zoom', () => { for (z = 0; z < 29; ++z) { for (lat = 90; lat <= 90; lat = lat + 15) { for (lng = -180; lng <= 180; lng = lng + 15) { - const quadint = lib.quadkey.quadintFromLocation(lng, lat, z); - const childs = lib.quadkey.toChildren(quadint, z + 1); + const quadint = quadkeyLib.quadintFromLocation(lng, lat, z); + const childs = quadkeyLib.toChildren(quadint, z + 1); childs.forEach((element) => { - expect(lib.quadkey.toParent(element, z)).toEqual(quadint); + expect(quadkeyLib.toParent(element, z)).toEqual(quadint); }); } } @@ -57,10 +68,10 @@ test('toChildren should work at any level of zoom', () => { for (z = 0; z < 25; ++z) { for (lat = 90; lat <= 90; lat = lat + 15) { for (lng = -180; lng <= 180; lng = lng + 15) { - const quadint = lib.quadkey.quadintFromLocation(lng, lat, z); - const childs = lib.quadkey.toChildren(quadint, z + 5); + const quadint = quadkeyLib.quadintFromLocation(lng, lat, z); + const childs = quadkeyLib.toChildren(quadint, z + 5); childs.forEach((element) => { - expect(lib.quadkey.toParent(element, z)).toEqual(quadint); + expect(quadkeyLib.toParent(element, z)).toEqual(quadint); }); } } @@ -78,7 +89,7 @@ test('Should be able to encode/decode between 
quadint and quadkey at any level o x = 0; y = 0; - let zxyDecoded = lib.quadkey.ZXYFromQuadint(lib.quadkey.quadintFromQuadkey(lib.quadkey.quadkeyFromQuadint(lib.quadkey.quadintFromZXY(z, x, y)))); + let zxyDecoded = quadkeyLib.ZXYFromQuadint(quadkeyLib.quadintFromQuadkey(quadkeyLib.quadkeyFromQuadint(quadkeyLib.quadintFromZXY(z, x, y)))); zDecoded = zxyDecoded.z; xDecoded = zxyDecoded.x; yDecoded = zxyDecoded.y; @@ -87,7 +98,7 @@ test('Should be able to encode/decode between quadint and quadkey at any level o if (z > 0) { x = tilesPerLevel / 2; y = tilesPerLevel / 2; - zxyDecoded = lib.quadkey.ZXYFromQuadint(lib.quadkey.quadintFromQuadkey(lib.quadkey.quadkeyFromQuadint(lib.quadkey.quadintFromZXY(z, x, y)))); + zxyDecoded = quadkeyLib.ZXYFromQuadint(quadkeyLib.quadintFromQuadkey(quadkeyLib.quadkeyFromQuadint(quadkeyLib.quadintFromZXY(z, x, y)))); zDecoded = zxyDecoded.z; xDecoded = zxyDecoded.x; yDecoded = zxyDecoded.y; @@ -95,7 +106,7 @@ test('Should be able to encode/decode between quadint and quadkey at any level o x = tilesPerLevel - 1; y = tilesPerLevel - 1; - zxyDecoded = lib.quadkey.ZXYFromQuadint(lib.quadkey.quadintFromQuadkey(lib.quadkey.quadkeyFromQuadint(lib.quadkey.quadintFromZXY(z, x, y)))); + zxyDecoded = quadkeyLib.ZXYFromQuadint(quadkeyLib.quadintFromQuadkey(quadkeyLib.quadkeyFromQuadint(quadkeyLib.quadintFromZXY(z, x, y)))); zDecoded = zxyDecoded.z; xDecoded = zxyDecoded.x; yDecoded = zxyDecoded.y; @@ -115,7 +126,7 @@ test('Should be able to encode/decode tiles at any level of zoom', async () => { x = 0; y = 0; - let zxyDecoded = lib.quadkey.ZXYFromQuadint(lib.quadkey.quadintFromZXY(z, x, y)); + let zxyDecoded = quadkeyLib.ZXYFromQuadint(quadkeyLib.quadintFromZXY(z, x, y)); zDecoded = zxyDecoded.z; xDecoded = zxyDecoded.x; yDecoded = zxyDecoded.y; @@ -124,7 +135,7 @@ test('Should be able to encode/decode tiles at any level of zoom', async () => { if (z > 0) { x = tilesPerLevel / 2; y = tilesPerLevel / 2; - zxyDecoded = lib.quadkey.ZXYFromQuadint(lib.quadkey.quadintFromZXY(z, x, y)); + zxyDecoded = quadkeyLib.ZXYFromQuadint(quadkeyLib.quadintFromZXY(z, x, y)); zDecoded = zxyDecoded.z; xDecoded = zxyDecoded.x; yDecoded = zxyDecoded.y; @@ -132,7 +143,7 @@ test('Should be able to encode/decode tiles at any level of zoom', async () => { x = tilesPerLevel - 1; y = tilesPerLevel - 1; - zxyDecoded = lib.quadkey.ZXYFromQuadint(lib.quadkey.quadintFromZXY(z, x, y)); + zxyDecoded = quadkeyLib.ZXYFromQuadint(quadkeyLib.quadintFromZXY(z, x, y)); zDecoded = zxyDecoded.z; xDecoded = zxyDecoded.x; yDecoded = zxyDecoded.y; diff --git a/clouds/bigquery/libraries/javascript/test/random.test.js b/clouds/bigquery/libraries/javascript/test/random.test.js index f21b4dd35..4a02d3648 100644 --- a/clouds/bigquery/libraries/javascript/test/random.test.js +++ b/clouds/bigquery/libraries/javascript/test/random.test.js @@ -1,5 +1,5 @@ -const lib = require('../build/index'); +const randomLib = require('../build/random'); test('random library defined', () => { - expect(lib.random.generateRandomPointsInPolygon).toBeDefined(); + expect(randomLib.generateRandomPointsInPolygon).toBeDefined(); }); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/s2.test.js b/clouds/bigquery/libraries/javascript/test/s2.test.js new file mode 100644 index 000000000..e5381e717 --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/s2.test.js @@ -0,0 +1,9 @@ +const s2Lib = require('../build/s2'); + +test('s2 library defined', () => { + expect(s2Lib.keyToId).toBeDefined(); + 
expect(s2Lib.idToKey).toBeDefined(); + expect(s2Lib.latLngToKey).toBeDefined(); + expect(s2Lib.FromHilbertQuadKey).toBeDefined(); + expect(s2Lib.idToLatLng).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/libraries/javascript/test/transformations.test.js b/clouds/bigquery/libraries/javascript/test/transformations.test.js new file mode 100644 index 000000000..7a9ee511e --- /dev/null +++ b/clouds/bigquery/libraries/javascript/test/transformations.test.js @@ -0,0 +1,14 @@ +const transformationsLib = require('../build/transformations'); + +test('transformations library defined', () => { + expect(transformationsLib.featureCollection).toBeDefined(); + expect(transformationsLib.feature).toBeDefined(); + expect(transformationsLib.buffer).toBeDefined(); + expect(transformationsLib.centerMean).toBeDefined(); + expect(transformationsLib.centerMedian).toBeDefined(); + expect(transformationsLib.centerOfMass).toBeDefined(); + expect(transformationsLib.concave).toBeDefined(); + expect(transformationsLib.destination).toBeDefined(); + expect(transformationsLib.greatCircle).toBeDefined(); + expect(transformationsLib.along).toBeDefined(); +}); \ No newline at end of file diff --git a/clouds/bigquery/modules/Makefile b/clouds/bigquery/modules/Makefile index 61892d7b8..877caaa64 100644 --- a/clouds/bigquery/modules/Makefile +++ b/clouds/bigquery/modules/Makefile @@ -28,7 +28,7 @@ endif BQ_MODULE_LABEL ?= spatial_extension_module:core MODULE_PERMISSIONS_BASH ?= set_module_permissions.sh -REPLACEMENTS = "BQ_DATASET BQ_VERSION_FUNCTION BQ_PACKAGE_VERSION BQ_LIBRARY_BUCKET BQ_PROJECT BQ_REGION" +REPLACEMENTS = "BQ_DATASET BQ_VERSION_FUNCTION BQ_PACKAGE_VERSION BQ_PROJECT BQ_REGION" include $(COMMON_DIR)/Makefile @@ -57,7 +57,7 @@ build: $(NODE_MODULES_DEV) REPLACEMENTS=$(REPLACEMENTS)" "$(REPLACEMENTS_EXTRA) \ $(COMMON_DIR)/build_modules.js $(MODULES_DIRS) \ --output=$(BUILD_DIR) --libs_build_dir=$(LIBS_BUILD_DIR) --diff="$(diff)" \ - --modules=$(modules) --functions=$(functions) --production=$(production) --nodeps=$(nodeps) --dropfirst=$(dropfirst) + --modules=$(modules) --functions=$(functions) --production=$(production) --nodeps=$(nodeps) --dropfirst=$(dropfirst) --librarybucket=$(BQ_LIBRARY_BUCKET) --makelib=$(MAKE_LIB) deploy: check build echo "Deploying modules..." 
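For context on the pattern applied by the SQL diffs that follow: each UDF stops loading the single monolithic bundle referenced by @@BQ_LIBRARY_BUCKET@@ and instead loads only its own module's bundle placeholder (e.g. @@BQ_LIBRARY_ACCESSORS_BUCKET@@, @@BQ_LIBRARY_H3_BUCKET@@), calling the module-scoped global (accessorsLib, h3Lib, ...) rather than lib.<module>. Below is a minimal sketch of the resulting shape, mirroring the ST_ENVELOPE body; the function name and signature are hypothetical and not part of this patch:

CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.__EXAMPLE_ENVELOPE`
(geojson ARRAY<STRING>)
RETURNS STRING
DETERMINISTIC
LANGUAGE js
OPTIONS (
    -- per-module bucket placeholder instead of the shared @@BQ_LIBRARY_BUCKET@@
    library = ["@@BQ_LIBRARY_ACCESSORS_BUCKET@@"]
)
AS """
    if (!geojson) {
        return null;
    }
    // accessorsLib is the module-scoped global exposed by the per-module build (build/accessors.js)
    const fc = accessorsLib.featureCollection(geojson.map(x => accessorsLib.feature(JSON.parse(x))));
    return JSON.stringify(accessorsLib.envelope(fc).geometry);
""";
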
diff --git a/clouds/bigquery/modules/sql/accessors/ST_ENVELOPE.sql b/clouds/bigquery/modules/sql/accessors/ST_ENVELOPE.sql index 21426549e..149d5009b 100644 --- a/clouds/bigquery/modules/sql/accessors/ST_ENVELOPE.sql +++ b/clouds/bigquery/modules/sql/accessors/ST_ENVELOPE.sql @@ -8,15 +8,15 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_ACCESSORS_BUCKET@@"] ) AS """ if (!geojson) { return null; } - const featuresCollection = lib.accessors.featureCollection(geojson.map(x => lib.accessors.feature(JSON.parse(x)))); - const enveloped = lib.accessors.envelope(featuresCollection); + const featuresCollection = accessorsLib.featureCollection(geojson.map(x => accessorsLib.feature(JSON.parse(x)))); + const enveloped = accessorsLib.envelope(featuresCollection); return JSON.stringify(enveloped.geometry); """; diff --git a/clouds/bigquery/modules/sql/clustering/ST_CLUSTERKMEANS.sql b/clouds/bigquery/modules/sql/clustering/ST_CLUSTERKMEANS.sql index 3a84db951..109b67a51 100644 --- a/clouds/bigquery/modules/sql/clustering/ST_CLUSTERKMEANS.sql +++ b/clouds/bigquery/modules/sql/clustering/ST_CLUSTERKMEANS.sql @@ -7,7 +7,7 @@ CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.__CLUSTERKMEANS` RETURNS ARRAY> DETERMINISTIC LANGUAGE js -OPTIONS (library = ["@@BQ_LIBRARY_BUCKET@@"]) +OPTIONS (library = ["@@BQ_LIBRARY_CLUSTERING_BUCKET@@"]) AS """ if (!geojson) { return null; @@ -19,8 +19,8 @@ AS """ options.numberOfClusters = parseInt(Math.sqrt(geojson.length/2)) } options.mutate = true; - const featuresCollection = lib.clustering.featureCollection(lib.clustering.prioritizeDistinctSort(geojson).map(x => lib.clustering.feature(JSON.parse(x)))); - lib.clustering.clustersKmeans(featuresCollection, options); + const featuresCollection = clusteringLib.featureCollection(clusteringLib.prioritizeDistinctSort(geojson).map(x => clusteringLib.feature(JSON.parse(x)))); + clusteringLib.clustersKmeans(featuresCollection, options); const cluster = []; featuresCollection.features.forEach(function(item, index, array) { cluster.push({cluster: item.properties.cluster, geom: JSON.stringify(item.geometry)}); diff --git a/clouds/bigquery/modules/sql/constructors/ST_BEZIERSPLINE.sql b/clouds/bigquery/modules/sql/constructors/ST_BEZIERSPLINE.sql index 91898fcce..f50e2ee45 100644 --- a/clouds/bigquery/modules/sql/constructors/ST_BEZIERSPLINE.sql +++ b/clouds/bigquery/modules/sql/constructors/ST_BEZIERSPLINE.sql @@ -8,7 +8,7 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_CONSTRUCTORS_BUCKET@@"] ) AS """ if (!geojson) { @@ -21,7 +21,7 @@ AS """ if (sharpness != null) { options.sharpness = Number(sharpness); } - const curved = lib.constructors.bezierSpline(JSON.parse(geojson), options); + const curved = constructorsLib.bezierSpline(JSON.parse(geojson), options); return JSON.stringify(curved.geometry); """; diff --git a/clouds/bigquery/modules/sql/constructors/ST_MAKEELLIPSE.sql b/clouds/bigquery/modules/sql/constructors/ST_MAKEELLIPSE.sql index b72e5d255..62bab28c6 100644 --- a/clouds/bigquery/modules/sql/constructors/ST_MAKEELLIPSE.sql +++ b/clouds/bigquery/modules/sql/constructors/ST_MAKEELLIPSE.sql @@ -15,7 +15,7 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_CONSTRUCTORS_BUCKET@@"] ) AS """ if (!geojson || xSemiAxis == null || ySemiAxis == null) { @@ -31,7 +31,7 @@ AS """ if (steps != null) { options.steps = Number(steps); } - 
const ellipse = lib.constructors.ellipse(JSON.parse(geojson), Number(xSemiAxis), Number(ySemiAxis), options); + const ellipse = constructorsLib.ellipse(JSON.parse(geojson), Number(xSemiAxis), Number(ySemiAxis), options); return JSON.stringify(ellipse.geometry); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_BOUNDARY.sql b/clouds/bigquery/modules/sql/h3/H3_BOUNDARY.sql index 81d6c7841..7de740d5b 100644 --- a/clouds/bigquery/modules/sql/h3/H3_BOUNDARY.sql +++ b/clouds/bigquery/modules/sql/h3/H3_BOUNDARY.sql @@ -8,18 +8,18 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index) { return null; } - if (!lib.h3.h3IsValid(index)) { + if (!h3Lib.h3IsValid(index)) { return null; } - const coords = lib.h3.h3ToGeoBoundary(index, true); + const coords = h3Lib.h3ToGeoBoundary(index, true); let output = `POLYGON((`; for (let i = 0; i < coords.length - 1; i++) { output += coords[i][0] + ` ` + coords[i][1] + `,`; diff --git a/clouds/bigquery/modules/sql/h3/H3_CENTER.sql b/clouds/bigquery/modules/sql/h3/H3_CENTER.sql index 7c6ec43c3..072de1509 100644 --- a/clouds/bigquery/modules/sql/h3/H3_CENTER.sql +++ b/clouds/bigquery/modules/sql/h3/H3_CENTER.sql @@ -8,18 +8,18 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index) { return null; } - if (!lib.h3.h3IsValid(index)) { + if (!h3Lib.h3IsValid(index)) { return null; } - const center = lib.h3.h3ToGeo(index); + const center = h3Lib.h3ToGeo(index); return `POINT(`+center[1] + ` ` + center[0] + `)`; """; diff --git a/clouds/bigquery/modules/sql/h3/H3_COMPACT.sql b/clouds/bigquery/modules/sql/h3/H3_COMPACT.sql index f7e51887c..77c3755e3 100644 --- a/clouds/bigquery/modules/sql/h3/H3_COMPACT.sql +++ b/clouds/bigquery/modules/sql/h3/H3_COMPACT.sql @@ -8,11 +8,11 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (h3Array === null) { return null; } - return lib.h3.compact(h3Array); + return h3Lib.compact(h3Array); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_DISTANCE.sql b/clouds/bigquery/modules/sql/h3/H3_DISTANCE.sql index c775a3648..635d237b9 100644 --- a/clouds/bigquery/modules/sql/h3/H3_DISTANCE.sql +++ b/clouds/bigquery/modules/sql/h3/H3_DISTANCE.sql @@ -8,13 +8,13 @@ RETURNS INT64 DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index1 || !index2) { return null; } - let dist = lib.h3.h3Distance(index1, index2); + let dist = h3Lib.h3Distance(index1, index2); if (dist < 0) { dist = null; } diff --git a/clouds/bigquery/modules/sql/h3/H3_FROMLONGLAT.sql b/clouds/bigquery/modules/sql/h3/H3_FROMLONGLAT.sql index 66e1007fa..0e3ae58b9 100644 --- a/clouds/bigquery/modules/sql/h3/H3_FROMLONGLAT.sql +++ b/clouds/bigquery/modules/sql/h3/H3_FROMLONGLAT.sql @@ -8,11 +8,11 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (longitude === null || latitude === null || resolution === null) { return null; } - return lib.h3.geoToH3(Number(latitude), Number(longitude), Number(resolution)); + return h3Lib.geoToH3(Number(latitude), Number(longitude), Number(resolution)); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_HEXRING.sql b/clouds/bigquery/modules/sql/h3/H3_HEXRING.sql index fd3d0ea7b..85bf8a951 100644 
--- a/clouds/bigquery/modules/sql/h3/H3_HEXRING.sql +++ b/clouds/bigquery/modules/sql/h3/H3_HEXRING.sql @@ -8,14 +8,14 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ - if (!lib.h3.h3IsValid(origin)) { + if (!h3Lib.h3IsValid(origin)) { throw new Error('Invalid input origin') } if (size == null || size < 0) { throw new Error('Invalid input size') } - return lib.h3.hexRing(origin, parseInt(size)); + return h3Lib.hexRing(origin, parseInt(size)); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_ISPENTAGON.sql b/clouds/bigquery/modules/sql/h3/H3_ISPENTAGON.sql index 87a506799..e3eb67c82 100644 --- a/clouds/bigquery/modules/sql/h3/H3_ISPENTAGON.sql +++ b/clouds/bigquery/modules/sql/h3/H3_ISPENTAGON.sql @@ -8,11 +8,11 @@ RETURNS BOOLEAN DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index) { return false; } - return lib.h3.h3IsPentagon(index); + return h3Lib.h3IsPentagon(index); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_ISVALID.sql b/clouds/bigquery/modules/sql/h3/H3_ISVALID.sql index f7a5feecf..596ccc659 100644 --- a/clouds/bigquery/modules/sql/h3/H3_ISVALID.sql +++ b/clouds/bigquery/modules/sql/h3/H3_ISVALID.sql @@ -8,11 +8,11 @@ RETURNS BOOLEAN DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index) { return false; } - return lib.h3.h3IsValid(index); + return h3Lib.h3IsValid(index); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_KRING.sql b/clouds/bigquery/modules/sql/h3/H3_KRING.sql index cc759286e..edf8a2f22 100644 --- a/clouds/bigquery/modules/sql/h3/H3_KRING.sql +++ b/clouds/bigquery/modules/sql/h3/H3_KRING.sql @@ -8,14 +8,14 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ - if (!lib.h3.h3IsValid(origin)) { + if (!h3Lib.h3IsValid(origin)) { throw new Error('Invalid input origin') } if (size == null || size < 0) { throw new Error('Invalid input size') } - return lib.h3.kRing(origin, parseInt(size)); + return h3Lib.kRing(origin, parseInt(size)); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_KRING_DISTANCES.sql b/clouds/bigquery/modules/sql/h3/H3_KRING_DISTANCES.sql index 3ba661433..60c03a6ae 100644 --- a/clouds/bigquery/modules/sql/h3/H3_KRING_DISTANCES.sql +++ b/clouds/bigquery/modules/sql/h3/H3_KRING_DISTANCES.sql @@ -8,16 +8,16 @@ RETURNS ARRAY> DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ - if (!lib.h3.h3IsValid(origin)) { + if (!h3Lib.h3IsValid(origin)) { throw new Error('Invalid input origin') } if (size == null || size < 0) { throw new Error('Invalid input size') } - const kringDistances = lib.h3.kRingDistances(origin, size); + const kringDistances = h3Lib.kRingDistances(origin, size); const output = []; for (let distance = 0; distance <= size; distance++) { const indexes = kringDistances[distance]; diff --git a/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql b/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql index 581d39bc7..c37b2e6ee 100644 --- a/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql +++ b/clouds/bigquery/modules/sql/h3/H3_POLYFILL.sql @@ -8,7 +8,7 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ const resolution = Number(_resolution) @@ -21,28 
+21,28 @@ AS """ case 'GeometryCollection': featureGeometry.geometries.forEach(function (geom) { if (geom.type === 'MultiPolygon') { - var clippedGeometryA = lib.h3.bboxClip(geom, bboxA).geometry + var clippedGeometryA = h3Lib.bboxClip(geom, bboxA).geometry polygonCoordinatesA = polygonCoordinatesA.concat(clippedGeometryA.coordinates) - var clippedGeometryB = lib.h3.bboxClip(geom, bboxB).geometry + var clippedGeometryB = h3Lib.bboxClip(geom, bboxB).geometry polygonCoordinatesB = polygonCoordinatesB.concat(clippedGeometryB.coordinates) } else if (geom.type === 'Polygon') { - var clippedGeometryA = lib.h3.bboxClip(geom, bboxA).geometry + var clippedGeometryA = h3Lib.bboxClip(geom, bboxA).geometry polygonCoordinatesA = polygonCoordinatesA.concat([clippedGeometryA.coordinates]) - var clippedGeometryB = lib.h3.bboxClip(geom, bboxB).geometry + var clippedGeometryB = h3Lib.bboxClip(geom, bboxB).geometry polygonCoordinatesB = polygonCoordinatesB.concat([clippedGeometryB.coordinates]) } }) break case 'MultiPolygon': - var clippedGeometryA = lib.h3.bboxClip(featureGeometry, bboxA).geometry + var clippedGeometryA = h3Lib.bboxClip(featureGeometry, bboxA).geometry polygonCoordinatesA = clippedGeometryA.coordinates - var clippedGeometryB = lib.h3.bboxClip(featureGeometry, bboxB).geometry + var clippedGeometryB = h3Lib.bboxClip(featureGeometry, bboxB).geometry polygonCoordinatesB = clippedGeometryB.coordinates break case 'Polygon': - var clippedGeometryA = lib.h3.bboxClip(featureGeometry, bboxA).geometry + var clippedGeometryA = h3Lib.bboxClip(featureGeometry, bboxA).geometry polygonCoordinatesA = [clippedGeometryA.coordinates] - var clippedGeometryB = lib.h3.bboxClip(featureGeometry, bboxB).geometry + var clippedGeometryB = h3Lib.bboxClip(featureGeometry, bboxB).geometry polygonCoordinatesB = [clippedGeometryB.coordinates] break default: @@ -54,11 +54,11 @@ AS """ } let hexesA = polygonCoordinatesA.reduce( - (acc, coordinates) => acc.concat(lib.h3.polyfill(coordinates, resolution, true)), + (acc, coordinates) => acc.concat(h3Lib.polyfill(coordinates, resolution, true)), [] ).filter(h => h != null) let hexesB = polygonCoordinatesB.reduce( - (acc, coordinates) => acc.concat(lib.h3.polyfill(coordinates, resolution, true)), + (acc, coordinates) => acc.concat(h3Lib.polyfill(coordinates, resolution, true)), [] ).filter(h => h != null) hexes = [...hexesA, ...hexesB] diff --git a/clouds/bigquery/modules/sql/h3/H3_RESOLUTION.sql b/clouds/bigquery/modules/sql/h3/H3_RESOLUTION.sql index d296809c2..dd5ee3e8b 100644 --- a/clouds/bigquery/modules/sql/h3/H3_RESOLUTION.sql +++ b/clouds/bigquery/modules/sql/h3/H3_RESOLUTION.sql @@ -8,16 +8,16 @@ RETURNS INT64 DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index) { return null; } - if (!lib.h3.h3IsValid(index)) { + if (!h3Lib.h3IsValid(index)) { return null; } - return lib.h3.h3GetResolution(index); + return h3Lib.h3GetResolution(index); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_TOCHILDREN.sql b/clouds/bigquery/modules/sql/h3/H3_TOCHILDREN.sql index 7ad375f91..ecc9a0956 100644 --- a/clouds/bigquery/modules/sql/h3/H3_TOCHILDREN.sql +++ b/clouds/bigquery/modules/sql/h3/H3_TOCHILDREN.sql @@ -8,14 +8,14 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index) { return null; } - if (!lib.h3.h3IsValid(index)) { + if (!h3Lib.h3IsValid(index)) { return null; } - return 
lib.h3.h3ToChildren(index, Number(resolution)); + return h3Lib.h3ToChildren(index, Number(resolution)); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_TOPARENT.sql b/clouds/bigquery/modules/sql/h3/H3_TOPARENT.sql index f1a0252c3..2ecb09379 100644 --- a/clouds/bigquery/modules/sql/h3/H3_TOPARENT.sql +++ b/clouds/bigquery/modules/sql/h3/H3_TOPARENT.sql @@ -8,14 +8,14 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (!index) { return null; } - if (!lib.h3.h3IsValid(index)) { + if (!h3Lib.h3IsValid(index)) { return null; } - return lib.h3.h3ToParent(index, Number(resolution)); + return h3Lib.h3ToParent(index, Number(resolution)); """; diff --git a/clouds/bigquery/modules/sql/h3/H3_UNCOMPACT.sql b/clouds/bigquery/modules/sql/h3/H3_UNCOMPACT.sql index 6ef711b99..e560dd468 100644 --- a/clouds/bigquery/modules/sql/h3/H3_UNCOMPACT.sql +++ b/clouds/bigquery/modules/sql/h3/H3_UNCOMPACT.sql @@ -8,11 +8,11 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_H3_BUCKET@@"] ) AS """ if (h3Array === null || resolution === null || resolution < 0 || resolution > 15) { return null; } - return lib.h3.uncompact(h3Array, Number(resolution)); + return h3Lib.uncompact(h3Array, Number(resolution)); """; diff --git a/clouds/bigquery/modules/sql/measurements/ST_ANGLE.sql b/clouds/bigquery/modules/sql/measurements/ST_ANGLE.sql index 97fd17e8a..e4059b1a0 100644 --- a/clouds/bigquery/modules/sql/measurements/ST_ANGLE.sql +++ b/clouds/bigquery/modules/sql/measurements/ST_ANGLE.sql @@ -8,7 +8,7 @@ RETURNS FLOAT64 DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_MEASUREMENTS_BUCKET@@"] ) AS """ if (!geojsonStart || !geojsonMid || !geojsonEnd) { @@ -18,7 +18,7 @@ AS """ if(mercator != null) { options.mercator = mercator; } - return lib.measurements.angle(JSON.parse(geojsonStart), JSON.parse(geojsonMid), JSON.parse(geojsonEnd), options); + return measurementsLib.angle(JSON.parse(geojsonStart), JSON.parse(geojsonMid), JSON.parse(geojsonEnd), options); """; CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.ST_ANGLE` diff --git a/clouds/bigquery/modules/sql/measurements/ST_MINKOWSKIDISTANCE.sql b/clouds/bigquery/modules/sql/measurements/ST_MINKOWSKIDISTANCE.sql index 5a0c7d826..2e448e14e 100644 --- a/clouds/bigquery/modules/sql/measurements/ST_MINKOWSKIDISTANCE.sql +++ b/clouds/bigquery/modules/sql/measurements/ST_MINKOWSKIDISTANCE.sql @@ -8,7 +8,7 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_MEASUREMENTS_BUCKET@@"] ) AS """ if (!geojson) { @@ -18,8 +18,8 @@ AS """ if(p != null) { options.p = Number(p); } - const features = lib.measurements.featureCollection(geojson.map(x => lib.measurements.feature(JSON.parse(x)))); - const distance = lib.measurements.distanceWeight(features, options); + const features = measurementsLib.featureCollection(geojson.map(x => measurementsLib.feature(JSON.parse(x)))); + const distance = measurementsLib.distanceWeight(features, options); return distance; """; diff --git a/clouds/bigquery/modules/sql/placekey/PLACEKEY_FROMH3.sql b/clouds/bigquery/modules/sql/placekey/PLACEKEY_FROMH3.sql index ea1857052..913af2afb 100644 --- a/clouds/bigquery/modules/sql/placekey/PLACEKEY_FROMH3.sql +++ b/clouds/bigquery/modules/sql/placekey/PLACEKEY_FROMH3.sql @@ -9,10 +9,10 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library 
= ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_PLACEKEY_BUCKET@@"] ) AS """ - return lib.placekey.h3ToPlacekey(h3Index); + return placekeyLib.h3ToPlacekey(h3Index); """; CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.PLACEKEY_FROMH3` diff --git a/clouds/bigquery/modules/sql/placekey/PLACEKEY_ISVALID.sql b/clouds/bigquery/modules/sql/placekey/PLACEKEY_ISVALID.sql index d6f59e7ae..4af181cf3 100644 --- a/clouds/bigquery/modules/sql/placekey/PLACEKEY_ISVALID.sql +++ b/clouds/bigquery/modules/sql/placekey/PLACEKEY_ISVALID.sql @@ -8,8 +8,8 @@ RETURNS BOOLEAN DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_PLACEKEY_BUCKET@@"] ) AS """ - return lib.placekey.placekeyIsValid(placekey); + return placekeyLib.placekeyIsValid(placekey); """; diff --git a/clouds/bigquery/modules/sql/placekey/PLACEKEY_TOH3.sql b/clouds/bigquery/modules/sql/placekey/PLACEKEY_TOH3.sql index de43d1d69..b333246ad 100644 --- a/clouds/bigquery/modules/sql/placekey/PLACEKEY_TOH3.sql +++ b/clouds/bigquery/modules/sql/placekey/PLACEKEY_TOH3.sql @@ -8,11 +8,11 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_PLACEKEY_BUCKET@@"] ) AS """ - if (!lib.placekey.placekeyIsValid(placekey)) { + if (!placekeyLib.placekeyIsValid(placekey)) { return null; } - return lib.placekey.placekeyToH3(placekey); + return placekeyLib.placekeyToH3(placekey); """; diff --git a/clouds/bigquery/modules/sql/processing/__VORONOIHELPER.sql b/clouds/bigquery/modules/sql/processing/__VORONOIHELPER.sql index 92c4dc72f..9d672f1ba 100644 --- a/clouds/bigquery/modules/sql/processing/__VORONOIHELPER.sql +++ b/clouds/bigquery/modules/sql/processing/__VORONOIHELPER.sql @@ -8,7 +8,7 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_PROCESSING_BUCKET@@"] ) AS """ if (!geojson) { @@ -26,8 +26,8 @@ AS """ options.bbox = bbox; } - const featuresCollection = lib.processing.featureCollection(geojson.map(x => lib.processing.feature(JSON.parse(x)))); - const voronoiPolygons = lib.processing.voronoi(featuresCollection, options); + const featuresCollection = processingLib.featureCollection(geojson.map(x => processingLib.feature(JSON.parse(x)))); + const voronoiPolygons = processingLib.voronoi(featuresCollection, options); const returnArray = []; @@ -39,7 +39,7 @@ AS """ if (typeOfVoronoi === 'lines') { voronoiPolygons.features.forEach( function(item) { - let lineFeature = lib.processing.polygonToLine(item.geometry); + let lineFeature = processingLib.polygonToLine(item.geometry); returnArray.push(JSON.stringify(lineFeature.geometry)); }); } diff --git a/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT.sql b/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT.sql index 4974b734c..8a880405b 100644 --- a/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT.sql +++ b/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT.sql @@ -8,11 +8,11 @@ RETURNS INT64 DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_QUADKEY_BUCKET@@"] ) AS """ if (longitude == null || latitude == null || resolution == null) { throw new Error('NULL argument passed to UDF'); } - return lib.quadkey.quadintFromLocation(Number(longitude), Number(latitude), Number(resolution)).toString(); + return quadkeyLib.quadintFromLocation(Number(longitude), Number(latitude), Number(resolution)).toString(); """; diff --git 
a/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT_ZOOMRANGE.sql b/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT_ZOOMRANGE.sql index aec3b762d..f47398494 100644 --- a/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT_ZOOMRANGE.sql +++ b/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMLONGLAT_ZOOMRANGE.sql @@ -15,7 +15,7 @@ RETURNS ARRAY> DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_QUADKEY_BUCKET@@"] ) AS """ if (longitude === undefined || longitude === null || latitude === undefined || latitude === null) { @@ -28,8 +28,8 @@ AS """ const qintIdx = []; for (let i = zoomMin; i <= zoomMax; i += zoomStep) { - const key = lib.quadkey.quadintFromLocation(longitude, latitude, i + intResolution); - const zxy = lib.quadkey.ZXYFromQuadint(key); + const key = quadkeyLib.quadintFromLocation(longitude, latitude, i + intResolution); + const zxy = quadkeyLib.ZXYFromQuadint(key); qintIdx.push({ id : key.toString(), z : i, x : zxy.x >>> intResolution, y : zxy.y >>> intResolution}); } return qintIdx; diff --git a/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMQUADKEY.sql b/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMQUADKEY.sql index 0b1d1d113..092889d5d 100644 --- a/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMQUADKEY.sql +++ b/clouds/bigquery/modules/sql/quadkey/QUADINT_FROMQUADKEY.sql @@ -8,8 +8,8 @@ RETURNS INT64 DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_QUADKEY_BUCKET@@"] ) AS """ - return lib.quadkey.quadintFromQuadkey(quadkey).toString(); + return quadkeyLib.quadintFromQuadkey(quadkey).toString(); """; diff --git a/clouds/bigquery/modules/sql/quadkey/QUADINT_POLYFILL.sql b/clouds/bigquery/modules/sql/quadkey/QUADINT_POLYFILL.sql index 2c91f7f95..aeee59ea6 100644 --- a/clouds/bigquery/modules/sql/quadkey/QUADINT_POLYFILL.sql +++ b/clouds/bigquery/modules/sql/quadkey/QUADINT_POLYFILL.sql @@ -8,7 +8,7 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_QUADKEY_BUCKET@@"] ) AS """ if (!geojson || resolution == null) { @@ -18,13 +18,13 @@ AS """ let quadints = []; if (pol.type == 'GeometryCollection') { pol.geometries.forEach(function (geom) { - quadints = quadints.concat(lib.quadkey.geojsonToQuadints(geom, {min_zoom: Number(resolution), max_zoom: Number(resolution)})); + quadints = quadints.concat(quadkeyLib.geojsonToQuadints(geom, {min_zoom: Number(resolution), max_zoom: Number(resolution)})); }); quadints = Array.from(new Set(quadints)); } else { - quadints = lib.quadkey.geojsonToQuadints(pol, {min_zoom: Number(resolution), max_zoom: Number(resolution)}); + quadints = quadkeyLib.geojsonToQuadints(pol, {min_zoom: Number(resolution), max_zoom: Number(resolution)}); } return quadints.map(String); """; diff --git a/clouds/bigquery/modules/sql/quadkey/QUADINT_TOCHILDREN.sql b/clouds/bigquery/modules/sql/quadkey/QUADINT_TOCHILDREN.sql index c710f92f8..45f1459b6 100644 --- a/clouds/bigquery/modules/sql/quadkey/QUADINT_TOCHILDREN.sql +++ b/clouds/bigquery/modules/sql/quadkey/QUADINT_TOCHILDREN.sql @@ -8,12 +8,12 @@ RETURNS ARRAY DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_QUADKEY_BUCKET@@"] ) AS """ if (quadint == null || resolution == null) { throw new Error('NULL argument passed to UDF'); } - const quadints = lib.quadkey.toChildren(quadint, Number(resolution)); + const quadints = quadkeyLib.toChildren(quadint, Number(resolution)); return 
quadints.map(String); """; diff --git a/clouds/bigquery/modules/sql/quadkey/QUADINT_TOQUADKEY.sql b/clouds/bigquery/modules/sql/quadkey/QUADINT_TOQUADKEY.sql index ff8e599d1..df2589eb3 100644 --- a/clouds/bigquery/modules/sql/quadkey/QUADINT_TOQUADKEY.sql +++ b/clouds/bigquery/modules/sql/quadkey/QUADINT_TOQUADKEY.sql @@ -8,11 +8,11 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_QUADKEY_BUCKET@@"] ) AS """ if (quadint == null) { throw new Error('NULL argument passed to UDF'); } - return lib.quadkey.quadkeyFromQuadint(quadint); + return quadkeyLib.quadkeyFromQuadint(quadint); """; diff --git a/clouds/bigquery/modules/sql/random/ST_GENERATEPOINTS.sql b/clouds/bigquery/modules/sql/random/ST_GENERATEPOINTS.sql index e63946a21..adff4a6de 100644 --- a/clouds/bigquery/modules/sql/random/ST_GENERATEPOINTS.sql +++ b/clouds/bigquery/modules/sql/random/ST_GENERATEPOINTS.sql @@ -7,9 +7,9 @@ CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.__ST_GENERATEPOINTS` RETURNS ARRAY DETERMINISTIC LANGUAGE js -OPTIONS (library = ["@@BQ_LIBRARY_BUCKET@@"]) +OPTIONS (library = ["@@BQ_LIBRARY_RANDOM_BUCKET@@"]) AS """ - return lib.random.generateRandomPointsInPolygon(JSON.parse(geojson), npoints); + return randomLib.generateRandomPointsInPolygon(JSON.parse(geojson), npoints); """; CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.ST_GENERATEPOINTS` diff --git a/clouds/bigquery/modules/sql/s2/S2_BOUNDARY.sql b/clouds/bigquery/modules/sql/s2/S2_BOUNDARY.sql index 2c362fc7d..4b6d0b65e 100644 --- a/clouds/bigquery/modules/sql/s2/S2_BOUNDARY.sql +++ b/clouds/bigquery/modules/sql/s2/S2_BOUNDARY.sql @@ -8,14 +8,14 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_S2_BUCKET@@"] ) AS """ if (id == null) { throw new Error('NULL argument passed to UDF'); } - const cornerLongLat = lib.s2.FromHilbertQuadKey(lib.s2.idToKey(id)).getCornerLatLngs(); + const cornerLongLat = s2Lib.FromHilbertQuadKey(s2Lib.idToKey(id)).getCornerLatLngs(); const wkt = `POLYGON((` + cornerLongLat[0]['lng'] + ` ` + cornerLongLat[0]['lat'] + `, ` + diff --git a/clouds/bigquery/modules/sql/s2/S2_CENTER.sql b/clouds/bigquery/modules/sql/s2/S2_CENTER.sql index 74faab5eb..779be266d 100644 --- a/clouds/bigquery/modules/sql/s2/S2_CENTER.sql +++ b/clouds/bigquery/modules/sql/s2/S2_CENTER.sql @@ -6,12 +6,12 @@ CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.__S2_CENTER`(id INT64) RETURNS STRUCT DETERMINISTIC LANGUAGE js -OPTIONS (library = ["@@BQ_LIBRARY_BUCKET@@"]) +OPTIONS (library = ["@@BQ_LIBRARY_S2_BUCKET@@"]) AS """ if (id == null) { throw new Error('NULL argument passed to UDF'); } - return lib.s2.idToLatLng(String(id)); + return s2Lib.idToLatLng(String(id)); """; CREATE OR REPLACE FUNCTION `@@BQ_DATASET@@.S2_CENTER` diff --git a/clouds/bigquery/modules/sql/s2/S2_FROMHILBERTQUADKEY.sql b/clouds/bigquery/modules/sql/s2/S2_FROMHILBERTQUADKEY.sql index 1ed1734ef..073f621f9 100644 --- a/clouds/bigquery/modules/sql/s2/S2_FROMHILBERTQUADKEY.sql +++ b/clouds/bigquery/modules/sql/s2/S2_FROMHILBERTQUADKEY.sql @@ -8,11 +8,11 @@ RETURNS INT64 DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_S2_BUCKET@@"] ) AS """ if (!quadkey) { throw new Error('NULL argument passed to UDF'); } - return lib.s2.keyToId(quadkey); + return s2Lib.keyToId(quadkey); """; diff --git a/clouds/bigquery/modules/sql/s2/S2_FROMLONGLAT.sql b/clouds/bigquery/modules/sql/s2/S2_FROMLONGLAT.sql index ca9f9ef4f..b1d8108d2 100644 
--- a/clouds/bigquery/modules/sql/s2/S2_FROMLONGLAT.sql +++ b/clouds/bigquery/modules/sql/s2/S2_FROMLONGLAT.sql @@ -8,12 +8,12 @@ RETURNS INT64 DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_S2_BUCKET@@"] ) AS """ if (latitude == null || longitude == null || resolution == null) { throw new Error('NULL argument passed to UDF'); } - const key = lib.s2.latLngToKey(Number(latitude), Number(longitude), Number(resolution)); - return lib.s2.keyToId(key); + const key = s2Lib.latLngToKey(Number(latitude), Number(longitude), Number(resolution)); + return s2Lib.keyToId(key); """; diff --git a/clouds/bigquery/modules/sql/s2/S2_TOHILBERTQUADKEY.sql b/clouds/bigquery/modules/sql/s2/S2_TOHILBERTQUADKEY.sql index a9b7824c4..cb60f3776 100644 --- a/clouds/bigquery/modules/sql/s2/S2_TOHILBERTQUADKEY.sql +++ b/clouds/bigquery/modules/sql/s2/S2_TOHILBERTQUADKEY.sql @@ -8,11 +8,11 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_S2_BUCKET@@"] ) AS """ if (id == null) { throw new Error('NULL argument passed to UDF'); } - return lib.s2.idToKey(id); + return s2Lib.idToKey(id); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_BUFFER.sql b/clouds/bigquery/modules/sql/transformations/ST_BUFFER.sql index 14227cc33..c5d8b89a1 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_BUFFER.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_BUFFER.sql @@ -8,7 +8,7 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojson || radius == null) { @@ -21,7 +21,7 @@ AS """ if (steps != null) { options.steps = Number(steps); } - const buffer = lib.transformations.buffer(JSON.parse(geojson), Number(radius), options); + const buffer = transformationsLib.buffer(JSON.parse(geojson), Number(radius), options); return JSON.stringify(buffer.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_CENTERMEAN.sql b/clouds/bigquery/modules/sql/transformations/ST_CENTERMEAN.sql index 40b0db2b2..bc0780ae2 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_CENTERMEAN.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_CENTERMEAN.sql @@ -8,13 +8,13 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojson) { return null; } - const center = lib.transformations.centerMean(JSON.parse(geojson)); + const center = transformationsLib.centerMean(JSON.parse(geojson)); return JSON.stringify(center.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_CENTERMEDIAN.sql b/clouds/bigquery/modules/sql/transformations/ST_CENTERMEDIAN.sql index 413ca93d6..aba6acad3 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_CENTERMEDIAN.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_CENTERMEDIAN.sql @@ -8,13 +8,13 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojson) { return null; } - const medianCenter = lib.transformations.centerMedian(lib.transformations.feature(JSON.parse(geojson))); + const medianCenter = transformationsLib.centerMedian(transformationsLib.feature(JSON.parse(geojson))); return JSON.stringify(medianCenter.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_CENTEROFMASS.sql 
b/clouds/bigquery/modules/sql/transformations/ST_CENTEROFMASS.sql index 2f811310e..f7c362409 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_CENTEROFMASS.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_CENTEROFMASS.sql @@ -8,13 +8,13 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojson) { return null; } - const center = lib.transformations.centerOfMass(JSON.parse(geojson)); + const center = transformationsLib.centerOfMass(JSON.parse(geojson)); return JSON.stringify(center.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_CONCAVEHULL.sql b/clouds/bigquery/modules/sql/transformations/ST_CONCAVEHULL.sql index 3792b2223..d639b2148 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_CONCAVEHULL.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_CONCAVEHULL.sql @@ -8,7 +8,7 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojson) { @@ -22,25 +22,25 @@ AS """ options.units = units; } - const multiPoints = lib.transformations.multiPoint(geojson.map(x => JSON.parse(x).coordinates)); - const nonDuplicates = lib.transformations.cleanCoords(multiPoints).geometry; + const multiPoints = transformationsLib.multiPoint(geojson.map(x => JSON.parse(x).coordinates)); + const nonDuplicates = transformationsLib.cleanCoords(multiPoints).geometry; const arrayCoordinates = nonDuplicates.coordinates; // Point if (arrayCoordinates.length == 1) { - return JSON.stringify(lib.transformations.point(arrayCoordinates[0]).geometry); + return JSON.stringify(transformationsLib.point(arrayCoordinates[0]).geometry); } // Segment if (arrayCoordinates.length == 2) { const start = arrayCoordinates[0]; const end = arrayCoordinates[1]; - const lineString = lib.transformations.lineString([start, end]); + const lineString = transformationsLib.lineString([start, end]); return JSON.stringify(lineString.geometry); } - const featuresCollection = lib.transformations.featureCollection(arrayCoordinates.map(x => lib.transformations.point(x))); - const hull = lib.transformations.concave(featuresCollection, options); + const featuresCollection = transformationsLib.featureCollection(arrayCoordinates.map(x => transformationsLib.point(x))); + const hull = transformationsLib.concave(featuresCollection, options); return JSON.stringify(hull.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_DESTINATION.sql b/clouds/bigquery/modules/sql/transformations/ST_DESTINATION.sql index 7ec9fa75a..1da7e427e 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_DESTINATION.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_DESTINATION.sql @@ -8,7 +8,7 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojsonStart || distance == null || bearing == null) { @@ -18,7 +18,7 @@ AS """ if (units) { options.units = units; } - const destination = lib.transformations.destination(JSON.parse(geojsonStart), Number(distance), Number(bearing), options); + const destination = transformationsLib.destination(JSON.parse(geojsonStart), Number(distance), Number(bearing), options); return JSON.stringify(destination.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_GREATCIRCLE.sql 
b/clouds/bigquery/modules/sql/transformations/ST_GREATCIRCLE.sql index 9605e2a79..fbf9f0ce9 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_GREATCIRCLE.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_GREATCIRCLE.sql @@ -8,7 +8,7 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojsonStart || !geojsonEnd || geojsonEnd === geojsonStart) { @@ -18,7 +18,7 @@ AS """ if (npoints != null) { options.npoints = Number(npoints); } - const greatCircle = lib.transformations.greatCircle(JSON.parse(geojsonStart), JSON.parse(geojsonEnd), options); + const greatCircle = transformationsLib.greatCircle(JSON.parse(geojsonStart), JSON.parse(geojsonEnd), options); return JSON.stringify(greatCircle.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_LINE_INTERPOLATE_POINT.sql b/clouds/bigquery/modules/sql/transformations/ST_LINE_INTERPOLATE_POINT.sql index fac984957..6c2c85b1a 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_LINE_INTERPOLATE_POINT.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_LINE_INTERPOLATE_POINT.sql @@ -8,7 +8,7 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojson || distance == null) { @@ -18,7 +18,7 @@ AS """ if (units) { options.units = units; } - const along = lib.transformations.along(JSON.parse(geojson), distance, options); + const along = transformationsLib.along(JSON.parse(geojson), distance, options); return JSON.stringify(along.geometry); """; diff --git a/clouds/bigquery/modules/sql/transformations/ST_POINTONSURFACE.sql b/clouds/bigquery/modules/sql/transformations/ST_POINTONSURFACE.sql index 9fcce0575..361545e95 100644 --- a/clouds/bigquery/modules/sql/transformations/ST_POINTONSURFACE.sql +++ b/clouds/bigquery/modules/sql/transformations/ST_POINTONSURFACE.sql @@ -8,13 +8,13 @@ RETURNS STRING DETERMINISTIC LANGUAGE js OPTIONS ( - library = ["@@BQ_LIBRARY_BUCKET@@"] + library = ["@@BQ_LIBRARY_TRANSFORMATIONS_BUCKET@@"] ) AS """ if (!geojson) { return null; } - const center = lib.transformations.pointOnFeature(JSON.parse(geojson)); + const center = transformationsLib.pointOnFeature(JSON.parse(geojson)); return JSON.stringify(center.geometry); """; From 2da5ee5dda58c1fd089a75bab8654ba7e4099adb Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Mon, 27 Jan 2025 10:49:21 +0100 Subject: [PATCH 21/25] chore(bq): allow supporting multiple libraries in one line (#543) --- clouds/bigquery/common/build_modules.js | 2 +- clouds/bigquery/common/list_libraries.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/clouds/bigquery/common/build_modules.js b/clouds/bigquery/common/build_modules.js index 41f6c3bc7..a112d9acb 100755 --- a/clouds/bigquery/common/build_modules.js +++ b/clouds/bigquery/common/build_modules.js @@ -159,7 +159,7 @@ if (argv.production) { let content = output.map(f => f.content).join(separator); function apply_replacements (text) { - const libraries = [... new Set(text.match(new RegExp('@@BQ_LIBRARY_.*_BUCKET@@', 'g')))]; + const libraries = [... 
new Set(content.match(new RegExp('@@BQ_LIBRARY_[^@]*?_BUCKET@@', 'g')))]; for (let library of libraries) { let libraryName = library.replace('@@BQ_LIBRARY_', '').replace('_BUCKET@@', '').toLowerCase(); if (makelib == libraryName) { diff --git a/clouds/bigquery/common/list_libraries.js b/clouds/bigquery/common/list_libraries.js index 878c35e17..fc2ecea29 100755 --- a/clouds/bigquery/common/list_libraries.js +++ b/clouds/bigquery/common/list_libraries.js @@ -134,7 +134,7 @@ function add (f, include) { functions.forEach(f => add(f)); const content = output.map(f => f.content).join('\n'); -let libraries = [... new Set(content.match(new RegExp('@@BQ_LIBRARY_.*_BUCKET@@', 'g')))] +let libraries = [... new Set(content.match(new RegExp('@@BQ_LIBRARY_[^@]*?_BUCKET@@', 'g')))] .map(l => l.replace('@@BQ_LIBRARY_', '').replace('_BUCKET@@', '').toLowerCase()); // Exclude libraries pointed by makelib as they are deployed separately From c9781906b8abec0de9062b23737a9315967c6f24 Mon Sep 17 00:00:00 2001 From: Valentin de la Cruz Barquero <6054336+vdelacruzb@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:24:18 +0100 Subject: [PATCH 22/25] chore(sf): SF_ACCOUNT no longer follows "account.region" format (#544) --- clouds/snowflake/common/Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/clouds/snowflake/common/Makefile b/clouds/snowflake/common/Makefile index 3021af7a9..4956908be 100644 --- a/clouds/snowflake/common/Makefile +++ b/clouds/snowflake/common/Makefile @@ -38,8 +38,8 @@ endif ifndef SF_PASSWORD $(error SF_PASSWORD is undefined) endif -ifeq ($(shell echo "$(SF_ACCOUNT)" | grep -E "^([^.]+)\.([^.]+)$$"),) - $(error SF_ACCOUNT is not valid. Must be: .) +ifndef SF_ACCOUNT + $(error SF_ACCOUNT is undefined) endif venv3: From ae3885136206f2ecf6ee6ac745cde866126b53ab Mon Sep 17 00:00:00 2001 From: vdelacruzb Date: Thu, 30 Jan 2025 10:37:48 +0100 Subject: [PATCH 23/25] update changelog --- CHANGELOG.md | 11 +++++++++++ clouds/bigquery/CHANGELOG.md | 4 ++++ clouds/bigquery/version | 2 +- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 70c97ee15..6f05c8794 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,17 @@ CARTO Analytics Toolbox Core. All notable commits to this project will be documented in this file. +## 2025-01-30 + +- chore(sf): deploy snowflake in CARTO.CARTO when releasing (#536) +- chore: update gh actions versions (#537) +- chore(bq): add skip dependency tag in bigquery build_modules (#538) +- docs(sf|h3): update h3_polyfill_table docs as it does not support points or lines (#539) +- chore(deps): bump jinja2 from 3.1.3 to 3.1.5 in /clouds/databricks/common (#540) +- fix(bq|h3,quadbin): H3_POLYFILL and QUADBIN_POLYFILL functions not working with holes (#542) +- chores(bq): split JS libraries generation (#541, #543) +- chore(sf): SF_ACCOUNT no longer follows "account.region" format (#544) + ## 2024-10-28 - chore(bq): fix @google-cloud/bigquery to version 7.9.0 (#531) diff --git a/clouds/bigquery/CHANGELOG.md b/clouds/bigquery/CHANGELOG.md index d82bac8e9..30a81be22 100644 --- a/clouds/bigquery/CHANGELOG.md +++ b/clouds/bigquery/CHANGELOG.md @@ -4,6 +4,10 @@ CARTO Analytics Toolbox Core for BigQuery. All notable commits to this project will be documented in this file. 
+## [1.2.5] - 2025-01-30 + +- fix(h3,quadbin): H3_POLYFILL and QUADBIN_POLYFILL functions not working with holes (#542) + ## [1.2.4] - 2024-10-28 - chore: fix @google-cloud/bigquery to version 7.9.0 (#531) diff --git a/clouds/bigquery/version b/clouds/bigquery/version index e8ea05db8..c813fe116 100644 --- a/clouds/bigquery/version +++ b/clouds/bigquery/version @@ -1 +1 @@ -1.2.4 +1.2.5 From 18aecaf75bd7d4c317a4c15e9035c5b9fef908b5 Mon Sep 17 00:00:00 2001 From: vdelacruzb Date: Thu, 30 Jan 2025 10:43:39 +0100 Subject: [PATCH 24/25] update changelogs --- CHANGELOG.md | 2 +- clouds/bigquery/CHANGELOG.md | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f05c8794..3fe33779a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ All notable commits to this project will be documented in this file. - docs(sf|h3): update h3_polyfill_table docs as it does not support points or lines (#539) - chore(deps): bump jinja2 from 3.1.3 to 3.1.5 in /clouds/databricks/common (#540) - fix(bq|h3,quadbin): H3_POLYFILL and QUADBIN_POLYFILL functions not working with holes (#542) -- chores(bq): split JS libraries generation (#541, #543) +- chore(bq): split JS libraries generation (#541, #543) - chore(sf): SF_ACCOUNT no longer follows "account.region" format (#544) ## 2024-10-28 diff --git a/clouds/bigquery/CHANGELOG.md b/clouds/bigquery/CHANGELOG.md index 30a81be22..eba54b850 100644 --- a/clouds/bigquery/CHANGELOG.md +++ b/clouds/bigquery/CHANGELOG.md @@ -6,7 +6,9 @@ All notable commits to this project will be documented in this file. ## [1.2.5] - 2025-01-30 +- chore: add skip dependency tag in bigquery build_modules (#538) - fix(h3,quadbin): H3_POLYFILL and QUADBIN_POLYFILL functions not working with holes (#542) +- chore(bq): split JS libraries generation (#541, #543) ## [1.2.4] - 2024-10-28 From 0397a49593d0786b0550c480d4834fb7a9176f03 Mon Sep 17 00:00:00 2001 From: vdelacruzb Date: Thu, 30 Jan 2025 10:57:51 +0100 Subject: [PATCH 25/25] update changelog --- clouds/bigquery/CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clouds/bigquery/CHANGELOG.md b/clouds/bigquery/CHANGELOG.md index eba54b850..09929a730 100644 --- a/clouds/bigquery/CHANGELOG.md +++ b/clouds/bigquery/CHANGELOG.md @@ -8,7 +8,7 @@ All notable commits to this project will be documented in this file. - chore: add skip dependency tag in bigquery build_modules (#538) - fix(h3,quadbin): H3_POLYFILL and QUADBIN_POLYFILL functions not working with holes (#542) -- chore(bq): split JS libraries generation (#541, #543) +- chore: split JS libraries generation (#541, #543) ## [1.2.4] - 2024-10-28
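
Note on the placeholder-matching change in clouds/bigquery/common/build_modules.js and clouds/bigquery/common/list_libraries.js (PATCH 21/25, #543): the greedy pattern @@BQ_LIBRARY_.*_BUCKET@@ collapses two library placeholders that share one source line into a single spanning match, whereas the lazy character class @@BQ_LIBRARY_[^@]*?_BUCKET@@ captures each placeholder separately, which is what the per-module buckets introduced earlier in this series (e.g. @@BQ_LIBRARY_H3_BUCKET@@, @@BQ_LIBRARY_QUADKEY_BUCKET@@) rely on. A minimal sketch of the difference, written as standalone Node.js that is not part of the repository (the sample input line is hypothetical):

    // Hypothetical input: two library placeholders on the same line.
    const line = 'library = ["@@BQ_LIBRARY_H3_BUCKET@@", "@@BQ_LIBRARY_QUADKEY_BUCKET@@"]';

    // Greedy '.*' spans from the first placeholder to the last one,
    // producing a single bogus match and a garbled library name.
    const greedy = [...new Set(line.match(new RegExp('@@BQ_LIBRARY_.*_BUCKET@@', 'g')))];

    // Lazy '[^@]*?' cannot cross the '@@' delimiters, so each placeholder
    // is returned as a separate match.
    const lazy = [...new Set(line.match(new RegExp('@@BQ_LIBRARY_[^@]*?_BUCKET@@', 'g')))];

    console.log(greedy); // [ '@@BQ_LIBRARY_H3_BUCKET@@", "@@BQ_LIBRARY_QUADKEY_BUCKET@@' ]
    console.log(lazy);   // [ '@@BQ_LIBRARY_H3_BUCKET@@', '@@BQ_LIBRARY_QUADKEY_BUCKET@@' ]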