Merge pull request #112 from superfaceai/dev
Prepare 2.1.0 release
freaz authored Jan 31, 2023
2 parents 4df7bf0 + 32280f2 commit f688b33
Showing 15 changed files with 153 additions and 27 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]
### Added
- Added syntax rule for `None` literal

### Fixed
- Profile header rule now strictly requires a full semver version in the version field, as the specification defines
- Condition that adds SingleLineCommentTrivia to termination tokens in the sublexer

## [2.0.0] - 2022-11-08
### Added
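For context on the two grammar changes listed in the changelog above, here is a hedged sketch (not part of this commit; the use-case and field names are invented) of a profile source embedded the way the test suites below embed their fixtures. It shows the full `<major>.<minor>.<patch>` version the header now requires and the new `None` literal in examples:

```ts
// Hypothetical profile fixture illustrating the 2.1.0 grammar changes.
const profileSource = `
name = "scope/profile"
// "1" or "1.2" no longer parse here; a full <major>.<minor>.<patch> is required
version = "1.2.3"

usecase GetThing {
  result {
    title string!
    subtitle string
  }

  example ok {
    result {
      title = "Hello"
      // None is accepted for the nullable field
      subtitle = None
    }
  }

  example fail {
    result {
      // ...but the example validator rejects it for the non-null (string!) field
      title = None
    }
  }
}
`;
```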
2 changes: 1 addition & 1 deletion README.md
@@ -1,6 +1,6 @@
# Parser

![GitHub Workflow Status](https://img.shields.io/github/workflow/status/superfaceai/parser/CI)
[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/superfaceai/parser/main.yml?branch=dev)](https://github.com/superfaceai/parser/actions/workflows/main.yml)
![NPM](https://img.shields.io/npm/v/@superfaceai/parser)
[![NPM](https://img.shields.io/npm/l/@superfaceai/parser)](LICENSE)
![TypeScript](https://img.shields.io/badge/%3C%2F%3E-Typescript-blue)
Expand Down
4 changes: 2 additions & 2 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "@superfaceai/parser",
"version": "2.0.0",
"version": "2.1.0-rc.0",
"description": "Level 5 autonomous, self-driving API client, https://superface.ai",
"repository": "https://github.com/superfaceai/parser.git",
"source": "lib/index.js",
@@ -43,7 +43,7 @@
"ts-jest": "^27.1.3"
},
"dependencies": {
"@superfaceai/ast": "^1.2.0",
"@superfaceai/ast": "^1.3.0",
"@types/debug": "^4.1.5",
"debug": "^4.3.3",
"typescript": "^4"
20 changes: 20 additions & 0 deletions src/interpreter/example-validator.test.ts
@@ -99,5 +99,25 @@ describe('ExampleValidator', () => {
'ComlinkPrimitiveLiteral - Wrong Structure: expected [boolean | number], but got "string"',
]);
});

it('none', () => {
const profileAst = parseProfileFromSource(
`usecase Test {
result {
f1 string!
}
example fail {
result {
f1 = None
}
}
}`
);

expect(profileAst).not.toBeValidExample([
'ComlinkNoneLiteral - Wrong Structure: expected string!, but got None',
]);
});
});
});
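The new test above covers only the rejecting path. For completeness, a hedged sketch (not in the diff) of the accepting counterpart, assuming the file's local `parseProfileFromSource` helper and that the `toBeValidExample` matcher takes an empty expected-error list for the valid case:

```ts
it('accepts None for a nullable field', () => {
  const profileAst = parseProfileFromSource(
    `usecase Test {
      result {
        f1 string
      }

      example ok {
        result {
          f1 = None
        }
      }
    }`
  );

  expect(profileAst).toBeValidExample([]);
});
```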
23 changes: 23 additions & 0 deletions src/interpreter/example-validator.ts
@@ -2,6 +2,7 @@ import {
ComlinkAssignmentNode,
ComlinkListLiteralNode,
ComlinkLiteralNode,
ComlinkNoneLiteralNode,
ComlinkObjectLiteralNode,
ComlinkPrimitiveLiteralNode,
EnumDefinitionNode,
@@ -40,6 +41,7 @@
} from '.';
import {
validateListLiteral,
validateNoneLiteral,
validateObjectLiteral,
validatePrimitiveLiteral,
} from './utils';
@@ -102,6 +104,8 @@ export class ExampleValidator implements ProfileAstVisitor {
return this.visitComlinkObjectLiteralNode(node);
case 'ComlinkAssignment':
return this.visitComlinkAssignmentNode(node);
case 'ComlinkNoneLiteral':
return this.visitComlinkNoneLiteralNode(node);
// UNUSED
case 'FieldDefinition':
return this.visitFieldDefinitionNode(node);
@@ -229,6 +233,25 @@
return isValid;
}

visitComlinkNoneLiteralNode(node: ComlinkNoneLiteralNode): boolean {
assertDefinedStructure(this.currentStructure);

const { isValid } = validateNoneLiteral(this.currentStructure);

if (!isValid) {
this.errors.push({
kind: 'wrongStructure',
context: {
path: this.getPath(node),
expected: this.currentStructure,
actual: node,
},
});
}

return isValid;
}

visitComlinkListLiteralNode(node: ComlinkListLiteralNode): boolean {
if (this.structureIsPrepared(node)) {
return true;
7 changes: 7 additions & 0 deletions src/interpreter/profile-io-analyzer.ts
@@ -1,6 +1,7 @@
import {
ComlinkAssignmentNode,
ComlinkListLiteralNode,
ComlinkNoneLiteralNode,
ComlinkObjectLiteralNode,
ComlinkPrimitiveLiteralNode,
DocumentedNode,
@@ -141,6 +142,8 @@ export class ProfileIOAnalyzer implements ProfileAstVisitor {
return this.visitComlinkObjectLiteralNode(node);
case 'ComlinkAssignment':
return this.visitComlinkAssignmentNode(node);
case 'ComlinkNoneLiteral':
return this.visitComlinkNoneLiteralNode(node);
default:
assertUnreachable(node);
}
@@ -328,6 +331,10 @@
throw new Error('Not Implemented');
}

visitComlinkNoneLiteralNode(_node: ComlinkNoneLiteralNode): void {
throw new Error('Not Implemented');
}

/**
* store the named fields for later reference
*/
10 changes: 10 additions & 0 deletions src/interpreter/utils.ts
@@ -94,6 +94,8 @@ function formatLiteral(
case 'PrimitiveLiteral':
case 'ComlinkPrimitiveLiteral':
return formatPrimitive(literal.value);
case 'ComlinkNoneLiteral':
return 'None';
case 'ObjectLiteral':
case 'ComlinkObjectLiteral':
return `{${literal.fields.map(formatLiteral).join(', ')}}`;
@@ -269,6 +271,14 @@ export function validateListLiteral(
return { isValid: false };
}

export function validateNoneLiteral(structure: StructureType): { isValid: boolean } {
if (structure.kind === 'NonNullStructure') {
return { isValid: false };
}

return { isValid: true };
}

export function getOutcomes(
node: MapDefinitionNode | OperationDefinitionNode,
isErrorFilter?: boolean
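A standalone sketch of the rule the new `validateNoneLiteral` helper above encodes, using a deliberately simplified (hypothetical) `StructureType` union instead of the interpreter's real one:

```ts
// Simplified stand-ins for the interpreter's structure types (hypothetical shapes).
type StructureType =
  | { kind: 'NonNullStructure'; value: StructureType }
  | { kind: 'PrimitiveStructure'; type: 'string' | 'number' | 'boolean' };

// None satisfies any structure except one marked non-null (`!` in the profile).
function validateNoneLiteral(structure: StructureType): { isValid: boolean } {
  return { isValid: structure.kind !== 'NonNullStructure' };
}

const optionalString: StructureType = { kind: 'PrimitiveStructure', type: 'string' };
const requiredString: StructureType = { kind: 'NonNullStructure', value: optionalString };

console.log(validateNoneLiteral(optionalString)); // { isValid: true }
console.log(validateNoneLiteral(requiredString)); // { isValid: false } -> "expected string!, but got None"
```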
13 changes: 12 additions & 1 deletion src/language/language.test.ts
@@ -467,7 +467,10 @@ describe('profile', () => {
it('should parse profile with examples', () => {
const input = `
usecase Foo {
input { f! string! }
input {
f! string!
fn string
}
result number
error enum {
FORBIDDEN_WORD
@@ -478,6 +481,7 @@
input {
"hello has 5 letters"
f = "hello"
fn = None
}
result 5
// TODO: do we want this? async result undefined
@@ -531,6 +535,13 @@
title: 'hello has 5 letters',
},
},
{
kind: 'ComlinkAssignment',
key: ['fn'],
value: {
kind: 'ComlinkNoneLiteral'
}
}
],
},
},
3 changes: 2 additions & 1 deletion src/language/lexer/lexer.test.ts
@@ -348,7 +348,7 @@ describe('lexer', () => {
test('identifiers', () => {
const lexer = new Lexer(
new Source(
'ident my fier pls usecaseNOT modelout boolean b00lean a123456789_0'
'ident my fier pls usecaseNOT modelout boolean b00lean a123456789_0 None'
)
);
const expectedTokens: (LexerTokenData | IdentifierValue)[] = [
@@ -362,6 +362,7 @@
'boolean',
'b00lean',
'a123456789_0',
'None',
{ kind: LexerTokenKind.SEPARATOR, separator: 'EOF' },
];

2 changes: 1 addition & 1 deletion src/language/lexer/sublexer/jessie/expression.ts
@@ -864,7 +864,7 @@ function resolveTerminationTokens(

// Tokens that are always terminator tokens
// What isn't included here is the ts.SyntaxKind.EndOfFileToken token - this token is hardcoded into the scanner because it ignores nesting
if (!(ts.SyntaxKind.SingleLineCommentTrivia in termTokens)) {
if (!termTokens.includes(ts.SyntaxKind.SingleLineCommentTrivia)) {
termTokens.push(ts.SyntaxKind.SingleLineCommentTrivia);
}

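The one-line fix above matters because, on arrays, the `in` operator tests whether an index exists, not whether a value is present, so the old guard depended on the array's length rather than its contents. A minimal illustration:

```ts
const values = [10, 20, 30];

// `in` checks whether the *index* exists on the array object:
console.log(2 in values); // true: index 2 exists, even though no element equals 2
console.log(5 in values); // false: the array has no index 5

// `includes` checks whether a *value* is present, which is what the guard intends:
console.log(values.includes(2)); // false
console.log(values.includes(20)); // true
```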
2 changes: 1 addition & 1 deletion src/language/lexer/token.ts
@@ -179,7 +179,7 @@ export class LexerToken {
/** Data of the token. */
readonly data: LexerTokenData,
readonly location: LocationSpan
) {}
) { }

isSOF(): boolean {
return (
13 changes: 13 additions & 0 deletions src/language/syntax/rules/profile/literal.ts
@@ -2,6 +2,7 @@ import {
ComlinkAssignmentNode,
ComlinkListLiteralNode,
ComlinkLiteralNode,
ComlinkNoneLiteralNode,
ComlinkObjectLiteralNode,
ComlinkPrimitiveLiteralNode,
} from '@superfaceai/ast';
@@ -90,8 +91,20 @@
}
);

export const COMLINK_NONE_LITERAL: SyntaxRule<
WithLocation<ComlinkNoneLiteralNode>
> = SyntaxRule.identifier('None').map(
(match): WithLocation<ComlinkNoneLiteralNode> => {
return {
kind: 'ComlinkNoneLiteral',
location: match.location,
};
}
);

export const COMLINK_LITERAL = SyntaxRule.or(
COMLINK_PRIMITIVE_LITERAL,
COMLINK_NONE_LITERAL,
COMLINK_OBJECT_LITERAL,
COMLINK_LIST_LITERAL
);
45 changes: 40 additions & 5 deletions src/language/syntax/rules/profile/profile.test.ts
@@ -1484,7 +1484,7 @@ describe('profile syntax rules', () => {

tesTok({ kind: LexerTokenKind.IDENTIFIER, identifier: 'version' }),
tesTok({ kind: LexerTokenKind.OPERATOR, operator: '=' }),
tesTok({ kind: LexerTokenKind.STRING, string: '11.12' }),
tesTok({ kind: LexerTokenKind.STRING, string: '11.12.3' }),
];
const stream = new ArrayLexerStream(tokens);

@@ -1498,7 +1498,7 @@
version: {
major: 11,
minor: 12,
patch: 0,
patch: 3,
},
documentation: {
title: 'Title',
@@ -1522,7 +1522,7 @@

tesTok({ kind: LexerTokenKind.IDENTIFIER, identifier: 'version' }),
tesTok({ kind: LexerTokenKind.OPERATOR, operator: '=' }),
tesTok({ kind: LexerTokenKind.STRING, string: '1' }),
tesTok({ kind: LexerTokenKind.STRING, string: '1.2.3' }),

tesTok({ kind: LexerTokenKind.IDENTIFIER, identifier: 'model' }),
tesTok({ kind: LexerTokenKind.IDENTIFIER, identifier: 'model1' }),
@@ -1559,8 +1559,8 @@
name: 'profile',
version: {
major: 1,
minor: 0,
patch: 0,
minor: 2,
patch: 3,
},
},
tokens[1],
@@ -1630,6 +1630,23 @@
)
);
});

it('should require full version in profile header', () => {
const tokens: ReadonlyArray<LexerToken> = [
tesTok({ kind: LexerTokenKind.IDENTIFIER, identifier: 'name' }),
tesTok({ kind: LexerTokenKind.OPERATOR, operator: '=' }),
tesTok({ kind: LexerTokenKind.STRING, string: 'scope/profile' }),

tesTok({ kind: LexerTokenKind.IDENTIFIER, identifier: 'version' }),
tesTok({ kind: LexerTokenKind.OPERATOR, operator: '=' }),
tesTok({ kind: LexerTokenKind.STRING, string: '1.2' }),
];
const stream = new ArrayLexerStream(tokens);

const rule = rules.PROFILE_HEADER;

expect(rule.tryMatch(stream)).not.toBeAMatch();
});
});

describe('comlink literals', () => {
@@ -1652,6 +1669,24 @@
);
});

it('should parse none literal', () => {
const tokens: ReadonlyArray<LexerToken> = [
tesTok({ kind: LexerTokenKind.IDENTIFIER, identifier: 'None' }),
];
const stream = new ArrayLexerStream(tokens);

const rule = rules.COMLINK_NONE_LITERAL;

expect(rule.tryMatch(stream)).toBeAMatch(
tesMatch(
{
kind: 'ComlinkNoneLiteral',
},
tokens[0]
)
);
});

it('should parse object literal', () => {
const tokens: ReadonlyArray<LexerToken> = [
tesTok({ kind: LexerTokenKind.SEPARATOR, separator: '{' }),
10 changes: 5 additions & 5 deletions src/language/syntax/rules/profile/profile.ts
@@ -22,8 +22,8 @@ import {
UseCaseSlotDefinitionNode,
} from '@superfaceai/ast';

import { ProfileVersion } from '../../../../common';
import { parseDocumentId } from '../../../../common/document/parser';
import { VersionRange } from '../../../../common/document/version';
import { PARSED_AST_VERSION, PARSED_VERSION } from '../../../../metadata';
import { IdentifierTokenData, LexerTokenKind } from '../../../lexer/token';
import {
@@ -568,22 +568,22 @@ const PROFILE_VERSION = SyntaxRule.followedBy(
} & HasLocation
>(version => {
try {
const parsedVersion = VersionRange.fromString(version.data.string);
const parsedVersion = ProfileVersion.fromString(version.data.string);

return {
kind: 'match',
value: {
major: parsedVersion.major,
minor: parsedVersion.minor ?? 0,
patch: parsedVersion.patch ?? 0,
minor: parsedVersion.minor,
patch: parsedVersion.patch,
label: parsedVersion.label,
location: version.location,
},
};
} catch (error) {
return { kind: 'nomatch' };
}
}, 'semver version')
}, 'semver version in format `<major>.<minor>.<patch>`')
).map(([keyword, op, version]) => {
return {
version: {
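The switch from `VersionRange` to `ProfileVersion` is what enforces the stricter header rule: the old code padded a missing minor or patch with `0`, while, as the new header test implies, `ProfileVersion.fromString` rejects partial versions, so the rule callback returns `{ kind: 'nomatch' }` with the description 'semver version in format `<major>.<minor>.<patch>`'. A behavioral sketch (import path copied from profile.ts above; that partial versions throw is an assumption inferred from the tests):

```ts
import { ProfileVersion } from '../../../../common'; // same import as profile.ts

// Full semver strings still parse into major/minor/patch:
const version = ProfileVersion.fromString('1.2.3');
console.log(version.major, version.minor, version.patch); // 1 2 3

// Partial versions, which VersionRange used to pad with zeros ('1.2' -> 1.2.0),
// are assumed to throw here, which makes PROFILE_VERSION report a non-match:
try {
  ProfileVersion.fromString('1.2');
} catch (error) {
  console.log('rejected: profile header requires `<major>.<minor>.<patch>`');
}
```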