Commit 48b96dc1 authored by Dóra Kocsis

remove old files from archive, add option to use jsonencode

parent 565c96b8
function [dd, dat] = get_dataset(provider,dataset, varargin)
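% Download all series of a dataset from the DB.nomics API (v22) and return
% them as a cell array DD (a header row followed by one row per observation)
% together with the raw decoded response DAT. Optional name/value pairs
% ('limit', 'dimensions', 'mask') are converted into the query string by
% parse_options.
%
% Illustrative call (provider and dataset codes are only examples):
%   [dd, dat] = get_dataset('AMECO', 'ZUTN', 'limit', 100);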
if numel(varargin) > 0
    options = parse_options(varargin);
else
    options = ['?limit=1000&offset=0&q=&observations=1&align_periods=1&' ...
               'dimensions={}'];
end
default_api_base_url = 'https://api.db.nomics.world/v22/series/';
url = [default_api_base_url, provider, '/', dataset, options];
dat = webread(url);
docs = dat.series.docs;
nseries = dat.dataset.nb_series;
nrow = 1;
for id = 1:nseries
    nrow = nrow + length(docs(id).period);
end
dimensions_codes = dat.dataset.dimensions_codes_order;
ndim = length(dimensions_codes);
ncol = ndim + 10;
dd = cell(nrow,ncol);
dd{1,1} = '@frequency';
dd{1,2} = 'dataset_code';
dd{1,3} = 'dataset_name';
dd{1,4} = 'indexed_at';
dd{1,5} = 'original_period';
dd{1,6} = 'period';
dd{1,7} = 'provider_code';
dd{1,8} = 'series_code';
dd{1,9} = 'series_name';
dd{1,10} = 'value';
for i = 1:ndim
    dd{1,10+i} = dimensions_codes{i};
end
row = 2;
for i = 1:nseries
    doc = docs(i);
    x_frequency = doc.x_frequency;
    dataset_code = doc.dataset_code;
    dataset_name = doc.dataset_name;
    series_code = doc.series_code;
    series_name = doc.series_name;
    indexed_at = doc.indexed_at;
    provider_code = doc.provider_code;
    dimensions = doc.dimensions;
    period = doc.period;
    period_start_day = doc.period_start_day;
    value = doc.value;
    for j = 1:length(period)
        dd{row,1} = x_frequency;
        dd{row,2} = dataset_code;
        dd{row,3} = dataset_name;
        dd{row,4} = indexed_at;
        dd{row,5} = period{j};
        dd{row,6} = period_start_day{j};
        dd{row,7} = provider_code;
        dd{row,8} = series_code;
        dd{row,9} = series_name;
        if iscell(value)
            dd{row,10} = value{j};
        else
            dd{row,10} = value(j);
        end
        for k = 1:ndim
            dim_code = dimensions.(dimensions_codes{k});
            dd{row,10+k} = dim_code;
        end
        row = row + 1;
    end
end
function options_str = parse_options(arguments)
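% Build the query string appended to the API URL from name/value pairs.
% Recognised options: 'limit' (integer), 'dimensions' (JSON string) and
% 'mask'; 'dimensions' and 'mask' are mutually exclusive.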
i = 1;
options = struct();
options.limit = 1000;
options.dimensions = '{}';
options.mask = '';
while i <= length(arguments)
    if strcmpi(arguments{i}, 'limit')
        options.limit = arguments{i+1};
        i = i + 2;
    elseif strcmpi(arguments{i}, 'dimensions')
        if ~isempty(options.mask)
            error('rdb: you can''t use both option dimensions and option mask');
        end
        options.dimensions = arguments{i+1};
        i = i + 2;
    elseif strcmpi(arguments{i}, 'mask')
        if ~strcmp(options.dimensions, '{}')
            error('rdb: you can''t use both option dimensions and option mask');
        end
        options.mask = arguments{i+1};
        i = i + 2;
    else
        error('rdb: unknown option ''%s''', arguments{i});
    end
end
options_str = sprintf('?limit=%d&q=&observations=1&align_periods=1&dimensions=%s',...
    options.limit, options.dimensions);
if ~isempty(options.mask)
    options_str = [options_str, '&', options.mask];
end
function [dd, dat] = get_dseries(provider,dataset,series_id)
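% Return a single series as a dseries object. The actual API call is
% currently commented out; the JSON response is read from a local test.json
% file instead.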
%url = join(['https://api.db.nomics.world/v22/series/', provider, '/',...
% dataset, '/', series_id, '?observations=true'])
%json = urlread(join(url))
%save test json
json = fileread('test.json');
dat = jsondecode(json);
docs = dat.series.docs;
first_period = docs.period{1};
if strcmp(docs.x_frequency, 'annual')
    first_period = [first_period 'A'];
end
dd = dseries(docs.value, first_period, docs.series_code, docs.series_name);
function [dd, dat] = get_series(provider,dataset,series_id)
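% Return a single series as a plain struct (values, periods, code, name).
% As in get_dseries, the API call is commented out and the JSON response is
% read from a local test.json file.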
%url = join(['https://api.db.nomics.world/v22/series/', provider, '/',...
% dataset, '/', series_id, '?observations=true'])
%json = urlread(join(url))
%save test json
json = fileread('test.json');
dat = jsondecode(json);
docs = dat.series.docs;
first_period = docs.period{1};
if strcmp(docs.x_frequency, 'annual')
    first_period = [first_period 'A'];
end
dd = struct();
dd.values = docs.value;
dd.periods = docs.period;
dd.code = docs.series_code;
dd.name = docs.series_name;
function data = rdb(str, varargin)
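% Entry point: dispatch to get_series or get_dataset depending on the number
% of input arguments.
%
% Illustrative calls (provider/dataset/series codes are only examples):
%   data = rdb('AMECO', 'ZUTN');                       % whole dataset
%   data = rdb('AMECO', 'ZUTN', 'EA19.1.0.0.0.ZUTN');  % single series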
if nargin == 1
    data = get_series(str);
elseif nargin == 2
    data = get_dataset(str, varargin{1});
elseif nargin == 3
    data = get_series(str, varargin{1}, varargin{2});
else
    data = get_dataset(str, varargin{1}, varargin{2:end});
end
function run_all_tests()
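% Run every test script listed below and print a summary of the results.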
r = [];
r = [r; run_this_test('test_fetch_series_by_code')];
r = [r; run_this_test('test_fetch_series_by_code_mask')];
r = [r; run_this_test('test_fetch_series_by_code_mask_with_plus_in_dimension_code')];
r = [r; run_this_test('test_fetch_series_by_id')];
r = [r; run_this_test('test_fetch_series_by_ids_in_same_dataset')];
r = [r; run_this_test('test_fetch_series_by_ids_in_different_datasets')];
r = [r; run_this_test('test_fetch_series_by_dimension')];
r = [r; run_this_test('test_fetch_series_of_dataset')];
r = [r; run_this_test('test_fetch_series_by_api_link')];
r = [r; run_this_test('test_fetch_series_with_na_values')];
r = [r; run_this_test('test_fetch_series_with_max_nb_series')];
r = [r; run_this_test('test_fetch_series_with_filter_on_one_series')];
print_results(r);
function o = run_this_test(file)
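% Run a single test script and return {name, success flag, elapsed time}.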
try
    tstart = tic;
    run([file '.m']);
    elapsed = toc(tstart);
    o = {file, true, elapsed};
catch
    o = {file, false, NaN};
end
function print_results(r)
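% Pretty-print the {name, status, time} rows collected by run_all_tests.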
message = sprintf('Testsuite results:\n');
for i = 1:size(r, 1)
    if r{i,2}
        message = sprintf('%s\n%s\t\t PASS (%ss)', message, r{i,1}, num2str(r{i,3}));
    else
        message = sprintf('%s\n%s\t\t FAILED', message, r{i,1});
    end
end
disp(message)
@@ -51,8 +51,14 @@ for gg = 1:size(grouped_series, 2)
 posted_series_list{series} = posted_series;
 end
-posted_series_list = savejson('',posted_series_list, 'Compact', 1);
-posted_series_list = regexprep(posted_series_list,{' [[',']]'},{'[',']'});
+if matlab_ver_less_than('9.8')
+    posted_series_list = savejson('',posted_series_list, 'Compact', 1);
+    posted_series_list = regexprep(posted_series_list,{' [[',']]'},{'[',']'});
+else
+    posted_series_list = jsonencode(posted_series_list);
+end
 posted_series_list = transform_json_request(posted_series_list);
 json_request = sprintf('{"filters":%s,"series":%s}', dbnomics_filters, posted_series_list);
 try