/**** ACCOUNT ***************************************/
/* Show user SQL history with elapsed time and an estimated cost per query.
   Requires ACCOUNTADMIN (SNOWFLAKE.ACCOUNT_USAGE views). */
use role accountadmin;
use database snowflake;
use schema account_usage;
use warehouse interactive_wh;
alter session set timestamp_output_format = 'YYYY-MM-DD HH:MI:SS';
select
    start_time,
    query_type,
    user_name,
    warehouse_name,
    to_number(total_elapsed_time / 1000, 10, 2) as seconds,  -- elapsed ms -> seconds
    warehouse_size,
    -- NOTE(review): 300 looks like an assumed price per cloud-services credit — confirm
    (credits_used_cloud_services * 300) as cost,
    query_text
from snowflake.account_usage.query_history
where warehouse_size is not null
-- optional filters: uncomment and edit as needed
--and query_type = 'SELECT'
--and warehouse_name = 'XXXXX'
--and role_name = 'XXXXX'
--and user_name = 'XXXXX'
-- named instead of positional "order by 4"; NOTE(review): ordering by seconds/cost may have been intended
order by warehouse_name desc;
/* Query history with rows/bytes and estimated cost for a fixed date range. */
use role accountadmin;
use database snowflake;
use schema account_usage;
use warehouse interactive_wh;
select
    start_time,
    query_type,
    role_name,
    user_name,
    warehouse_name,
    to_number(total_elapsed_time / 1000, 10, 2) as seconds,  -- elapsed ms -> seconds
    bytes_scanned,
    rows_produced,
    rows_inserted,
    -- NOTE(review): 300 looks like an assumed price per cloud-services credit — confirm
    (credits_used_cloud_services * 300) as cost,
    query_text
from snowflake.account_usage.query_history
-- NOT IN replaces the chain of <> comparisons; the literal list contains no NULLs,
-- so the semantics are identical
where query_type not in ('GET_FILES', 'PUT_FILES', 'SHOW')
-- inclusive on both ends: to_date() truncates start_time to a calendar date
and to_date(start_time) between '2020-07-09' and '2020-08-25';
/* Show space taken by tables, largest first. */
select
    table_name,
    table_schema,
    to_varchar(active_bytes / 1024 / 1024, 'F999,999,999,999.9') as mbytes
from snowflake.account_usage.table_storage_metrics
-- sort on the raw byte count: the formatted column is VARCHAR, so the original
-- "order by 3 desc" sorted lexicographically (e.g. '9.0' ranked above '100.0')
order by active_bytes desc;
/* Show Credits used by Warehouse since the start of July 2020. */
use role accountadmin;
use database snowflake;
use schema account_usage;
select
    warehouse_name,
    sum(credits_used_cloud_services) as credits_used_cloud_services,
    sum(credits_used_compute) as credits_used_compute,
    sum(credits_used) as credits_used
from snowflake.account_usage.warehouse_metering_history
-- was ">= '2020-07-01 00:00:01'", which silently skipped the first second of the day
where start_time >= '2020-07-01 00:00:00'
group by warehouse_name;  -- named instead of positional "group by 1"
/**** WAREHOUSE ***************************************/
/* Warehouse settings: smallest size, suspend after 1 minute idle, single cluster. */
ALTER WAREHOUSE "MY_WAREHOUSE" SET
    WAREHOUSE_SIZE = 'XSMALL'
    AUTO_SUSPEND = 60          -- seconds of idle time before suspending
    AUTO_RESUME = TRUE
    MIN_CLUSTER_COUNT = 1
    MAX_CLUSTER_COUNT = 1
    SCALING_POLICY = 'STANDARD'
    COMMENT = '';
/* Rename a warehouse (no error if it does not exist). */
ALTER WAREHOUSE IF EXISTS "PBI_USER" RENAME TO "PBI_WH";
/* Snapshot SHOW TASKS output into a scratch table so it can be queried with SQL. */
SHOW tasks;
create or replace temporary table _tasks as
select
    shown.*,
    -- db.schema.name identifier for each task
    concat("database_name", '.', "schema_name", '.', "name") as fully_qualified_name
from table(result_scan(last_query_id())) shown;  -- must run immediately after SHOW TASKS
select * from _tasks;
/* Resize a warehouse. */
alter warehouse test_wh set warehouse_size = medium;
/**** DATABASE ***************************************/
Alter database MY_DB rename to MY_PROD_DB;
CREATE DATABASE MY_DB COMMENT = 'Test DB';
-- was CREATE SCHEMA "My_DB"."My_SCHEMA": the quoted mixed-case identifier "My_DB"
-- is case-sensitive and would NOT match the MY_DB database created above
CREATE SCHEMA MY_DB.MY_SCHEMA;
/**** SCHEMAS ***************************************/
/* List schemas in the current database with owner and timestamps. */
use role accountadmin;
use database DW_PROD_DB;
select
    catalog_name as database,
    schema_name,
    schema_owner,
    created,
    last_altered
from information_schema.schemata
order by schema_name;
/* Same data as one concatenated string per schema. */
select
    catalog_name || ', ' || schema_name || ', ' || schema_owner as "Database-schema-owner"
from information_schema.schemata
order by schema_name;
/**** USER ***************************************/
/* Names containing '-' must be double-quoted identifiers, not string literals. */
show grants to role "MY-ROLE";  -- was the string literal 'MY-ROLE'
-- was "DEFAUt_ROLE" (typo) with an unquoted hyphenated user name
alter user "MY-USER" SET DEFAULT_ROLE = 'PUBLIC';
alter user "MY-USER" set default_warehouse = 'MY-WAREHOUSE' default_role = 'MY-ROLE';
/* Create a read-only analyst role; no-op when it already exists. */
use role accountadmin;
use database dw_prod_db;
create role if not exists analyst_reader comment = 'Read-only access';
/* Grant read-only access on a whole database — current and future objects.
   Hyphenated names must be double-quoted identifiers (unquoted "my-db" is invalid).
   Duplicate grant statements from the original were removed (grants are idempotent). */
use role accountadmin;
use database "my-db";
grant usage on all schemas in database "my-db" to role "MY-ROLE";
grant usage on future schemas in database "my-db" to role "MY-ROLE";
grant select on all tables in database "my-db" to role "MY-ROLE";
grant select on future tables in database "my-db" to role "MY-ROLE";
/* Create a user and attach its default role.
   The hyphenated user name must be a double-quoted identifier. */
CREATE USER "MY-USER" PASSWORD = '***********' LOGIN_NAME = 'MY-USER' DISPLAY_NAME = 'MY-USER'
    FIRST_NAME = 'first' LAST_NAME = 'Last' EMAIL = 'MY-USER@gmail.com'
    DEFAULT_ROLE = "MY-ROLE" DEFAULT_WAREHOUSE = 'MY-WAREHOUSE' MUST_CHANGE_PASSWORD = TRUE;
GRANT ROLE "MY-ROLE" TO USER "MY-USER";
Drop schema DEMO_DB.DEMO_DB;
/**** COPY ***************************************/
/* Load a single unload file using the stage's default file format. */
copy into company_summary
from @my_s3_stage/my-table-redshift-unload000;
/* Load every matching part file with an explicit CSV format; skip bad rows. */
copy into employee_summary
from @my_s3_stage
file_format = (format_name = MY_CSV_FORMAT)
ON_ERROR = CONTINUE
pattern = 'my-table-redshift-unload0001_part_*';
/* Show S3 Bucket files */
select $1, $2 from @my_s3_stage/ACCOUNTS limit 100;
/* Load directly from an S3 URL with inline credentials.
   NOTE(review): prefer a stage or storage integration over inline keys in scripts. */
copy into opportunity
-- the external location must be a quoted string; the statement also lacked its ';'
from 's3://ops-backups/dev/opportunity_sf'
credentials = (aws_key_id = 'xxxxxxxxxxxxxxxxxxxxxxxx'
               aws_secret_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
file_format = (type = csv);
/* CSV stage with credentials for copy command.
   Fixes vs original: the URL was missing the s3:// scheme, the secret-key literal
   contained a stray typographic quote, and FILE_FORMAT = auto is not valid syntax. */
create or replace stage my_s3_stage url = 's3://ops-backups/dev/'
    credentials = (aws_key_id = '<key_id>' aws_secret_key = '<key_secret>')
    encryption = (master_key = '<encryption_key>')
    file_format = (type = csv);  -- NOTE(review): original said "auto" — confirm intended format
use database MY_DB;
list @my_s3_stage/ACCOUNTS/;
/**** TABLE ***************************************/
/* Snapshot the output of the immediately preceding SHOW command into a scratch table. */
create or replace temporary table _tasks as
select
    shown.*,
    -- db.schema.name identifier for each row
    concat("database_name", '.', "schema_name", '.', "name") as fully_qualified_name
from table(result_scan(last_query_id())) shown;
/*** Create table ***/
create or replace table company_summary (
    company_id    bigint,        -- application company id
    status_desc   varchar(32),
    name_size     varchar(255),
    name_version  varchar(255),
    name_industry varchar(255)
);
/* Peek at table metadata, excluding the metadata schema itself. */
select * from snowflake.information_schema.tables
where table_schema <> 'INFORMATION_SCHEMA'
limit 10;
/**** SELECT ***************************************/
/* select */
select * from "DEMO_DB"."PUBLIC"."COMPANY_SUMMARY"
limit 10;
/* Distribution of (end - start) day gaps, excluding demo companies. */
select
    datediff(day, start_timestamp, end_timestamp) as diff_days,
    count(*) as num_rows
from employee_summary e
left outer join public.company_summary c
    on (e.company_id = c.app_company_id)
-- original fused ")where" with no whitespace and used positional group/order refs
where start_timestamp <> end_timestamp
-- qualify is_demo to c (was ambiguous if both tables carry the column);
-- the OR IS NULL keeps employees with no company match
and (c.is_demo <> 1 or c.is_demo is null)
group by diff_days
order by num_rows desc
limit 10000;
/* Employee and distinct-company counts for selected Utah cities, excluding demos. */
SELECT
    e.city,
    count(*) as employees,
    count(distinct e.company_id) as companies
FROM public.employee_summary e
left outer join public.company_summary c
    on (e.company_id = c.app_company_id)
-- qualify city and is_demo (were unqualified — ambiguous if both tables carry them)
where e.city in ('Provo', 'Orem', 'Lindon', 'Lehi', 'American Fork', 'Springville', 'Spanish Fork')
-- the OR IS NULL keeps employees with no company match
and (c.is_demo <> 1 or c.is_demo is null)
group by e.city
order by employees desc
limit 1000;
/* Row counts per country/city, largest first. */
select count(*), country, city
from EMPLOYEE_SUMMARY
group by country, city      -- same result as positional "group by 2,3"
order by count(*) desc      -- same result as positional "order by 1 desc"
limit 300;
/**** INSERT ***************************************/
/* insert */
-- NOTE(review): no column list — this depends on the employee table's exact column
-- count and order; add an explicit column list once the schema is confirmed.
-- Presumably (id, first, last, birth_date, flag, ref_id, gender) — verify against the DDL.
INSERT INTO employee VALUES(100, 'Jack', 'Watts', '1987-11-17', '0', 100, 'M');
/* Daily invoice totals since July 2020. */
select to_date(created_date), sum(amount) as amount
from dw_prod_db.zuora.invoice
where created_date > '2020-07-01'
group by to_date(created_date)   -- same as positional "group by 1"
order by to_date(created_date);  -- same as positional "order by 1"
/* Column metadata for SALESFORCE.ACCOUNT, in declared table order. */
select
    table_name,
    column_name,
    ordinal_position,
    column_default,
    is_nullable,
    data_type,
    character_maximum_length,
    numeric_precision,
    numeric_precision_radix,
    numeric_scale
from DW_PROD_DB.INFORMATION_SCHEMA.COLUMNS
where table_schema = 'SALESFORCE'
  and table_name = 'ACCOUNT'
order by ordinal_position;