Merge branch 'main' of gitlab.cs.wallawalla.edu:lustje/language-interpreter-lab into josh_11_7

vel 2024-11-10 15:22:02 -08:00
commit f34b1541ad
Signed by: velvox
GPG Key ID: 59D9762F674151DF
11 changed files with 2181 additions and 7 deletions

.gitignore
View File

@@ -1,7 +1,10 @@
.DS_Store
*.out
scanner.c
lab-4/token.h
*.o
*.out
*.output
parser.c
parser.h
parser.output
scanner.c
test_print.txt
test_evaluate.txt

View File

@@ -25,3 +25,15 @@ lab_4_tests:
- echo "Lab 4 - Bison Parser"
- make test
- ./parser_test.out
lab_5_tests:
stage: test
image: gitlab.cs.wallawalla.edu:5050/cs_department/docker-images/cpp
script:
- apt update
- apt install -y flex bison
- bison --version
- cd lab-5
- echo "Lab 5 - Interpreter"
- make test
- ./interpreter_test.out
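
The new lab_5_tests job mirrors the existing lab_4_tests stage but installs flex and bison first, since the lab-5 build regenerates its parser (and scanner) sources before linking; it then runs make test and executes the resulting interpreter_test.out.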

View File

@@ -313,3 +313,272 @@ UTEST(scanner, quadratic) {
++index;
} while (t != TOKEN_EOF);
}
UTEST(scanner, sorting) {
struct token_st tokens[] = {
{TOKEN_IDENTIFICATION, "IDENTIFICATION"},
{TOKEN_KEYWORD_DIVISION, "DIVISION"},
{TOKEN_DOT, "."},
{TOKEN_PROGRAM_ID, "PROGRAM-ID"},
{TOKEN_DOT, "."},
{TOKEN_IDENT, "sorting"},
{TOKEN_DOT, "."},
{TOKEN_KEYWORD_DATA, "DATA"},
{TOKEN_KEYWORD_DIVISION, "DIVISION"},
{TOKEN_DOT, "."},
{TOKEN_WORKING_STORAGE, "WORKING-STORAGE"},
{TOKEN_KEYWORD_SECTION, "SECTION"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "01"},
{TOKEN_IDENT, "WS-SORT-AREA"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "05"},
{TOKEN_IDENT, "WS-SORT-TABLE"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "10"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_PICTURE, "PIC"},
{TOKEN_ALPHANUMERIC, "X"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "10"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_KEYWORD_OCCURS, "OCCURS"},
{TOKEN_INTEGER, "100"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "05"},
{TOKEN_IDENT, "WS-TEMP-ROW"},
{TOKEN_PICTURE, "PIC"},
{TOKEN_ALPHANUMERIC, "X"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "10"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "05"},
{TOKEN_IDENT, "WS-ROW-MAX"},
{TOKEN_PICTURE, "PIC"},
{TOKEN_SIGNED_NUMERIC, "S9"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "4"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_COMPUTATION_LEVEL_0, "COMP"},
{TOKEN_KEYWORD_VALUE, "VALUE"},
{TOKEN_INTEGER, "100"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "05"},
{TOKEN_IDENT, "WS-SORT-MAX"},
{TOKEN_PICTURE, "PIC"},
{TOKEN_SIGNED_NUMERIC, "S9"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "4"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_COMPUTATION_LEVEL_0, "COMP"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "05"},
{TOKEN_IDENT, "WS-I"},
{TOKEN_PICTURE, "PIC"},
{TOKEN_SIGNED_NUMERIC, "S9"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "4"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_COMPUTATION_LEVEL_0, "COMP"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "05"},
{TOKEN_IDENT, "WS-J"},
{TOKEN_PICTURE, "PIC"},
{TOKEN_SIGNED_NUMERIC, "S9"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "4"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_COMPUTATION_LEVEL_0, "COMP"},
{TOKEN_DOT, "."},
{TOKEN_INTEGER, "05"},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_PICTURE, "PIC"},
{TOKEN_SIGNED_NUMERIC, "S9"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "4"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_COMPUTATION_LEVEL_0, "COMP"},
{TOKEN_DOT, "."},
{TOKEN_PROCEDURE, "PROCEDURE"},
{TOKEN_KEYWORD_DIVISION, "DIVISION"},
{TOKEN_DOT, "."},
{TOKEN_COMMENT, "*> Initialize test data"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_STRING, "\"30\""},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "1"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_STRING, "\"10\""},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "2"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_STRING, "\"50\""},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "3"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_STRING, "\"20\""},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "4"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_STRING, "\"40\""},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_INTEGER, "5"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_INTEGER, "5"},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-MAX"},
{TOKEN_COMMENT, "*> * Display original array"},
{TOKEN_DISPLAY, "DISPLAY"},
{TOKEN_STRING, "\"Original Array Contents:\""},
{TOKEN_DISPLAY, "DISPLAY"},
{TOKEN_STRING, "\"---------------------\""},
{TOKEN_PERFORM, "PERFORM"},
{TOKEN_VARYING, "VARYING"},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_KEYWORD_FROM, "FROM"},
{TOKEN_INTEGER, "1"},
{TOKEN_KEYWORD_BY, "BY"},
{TOKEN_INTEGER, "1"},
{TOKEN_UNTIL, "UNTIL"},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_GREATER_THAN, ">"},
{TOKEN_IDENT, "WS-SORT-MAX"},
{TOKEN_DISPLAY, "DISPLAY"},
{TOKEN_STRING, "\"Element \""},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_STRING, "\": \""},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_END_PERFORM, "END-PERFORM"},
{TOKEN_DISPLAY, "DISPLAY"},
{TOKEN_SPACE, "SPACE"},
{TOKEN_COMMENT, "*> * Simplified bubble sort"},
{TOKEN_PERFORM, "PERFORM"},
{TOKEN_VARYING, "VARYING"},
{TOKEN_IDENT, "WS-I"},
{TOKEN_KEYWORD_FROM, "FROM"},
{TOKEN_INTEGER, "1"},
{TOKEN_KEYWORD_BY, "BY"},
{TOKEN_INTEGER, "1"},
{TOKEN_UNTIL, "UNTIL"},
{TOKEN_IDENT, "WS-I"},
{TOKEN_GREATER_THAN, ">"},
{TOKEN_IDENT, "WS-SORT-MAX"},
{TOKEN_SUB, "-"},
{TOKEN_INTEGER, "1"},
{TOKEN_PERFORM, "PERFORM"},
{TOKEN_VARYING, "VARYING"},
{TOKEN_IDENT, "WS-J"},
{TOKEN_KEYWORD_FROM, "FROM"},
{TOKEN_INTEGER, "1"},
{TOKEN_KEYWORD_BY, "BY"},
{TOKEN_INTEGER, "1"},
{TOKEN_UNTIL, "UNTIL"},
{TOKEN_IDENT, "WS-J"},
{TOKEN_GREATER_THAN, ">"},
{TOKEN_IDENT, "WS-SORT-MAX"},
{TOKEN_SUB, "-"},
{TOKEN_IDENT, "WS-I"},
{TOKEN_IF, "IF"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-J"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_GREATER_THAN, ">"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-J"},
{TOKEN_ADD, "+"},
{TOKEN_INTEGER, "1"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-J"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-TEMP-ROW"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-J"},
{TOKEN_ADD, "+"},
{TOKEN_INTEGER, "1"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-J"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_MOVE, "MOVE"},
{TOKEN_IDENT, "WS-TEMP-ROW"},
{TOKEN_KEYWORD_TO, "TO"},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-J"},
{TOKEN_ADD, "+"},
{TOKEN_INTEGER, "1"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_END_IF, "END-IF"},
{TOKEN_END_PERFORM, "END-PERFORM"},
{TOKEN_END_PERFORM, "END-PERFORM"},
{TOKEN_COMMENT, "*> * Display sorted array"},
{TOKEN_DISPLAY, "DISPLAY"},
{TOKEN_STRING, "\"Sorted Array Contents:\""},
{TOKEN_DISPLAY, "DISPLAY"},
{TOKEN_STRING, "\"--------------------\""},
{TOKEN_PERFORM, "PERFORM"},
{TOKEN_VARYING, "VARYING"},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_KEYWORD_FROM, "FROM"},
{TOKEN_INTEGER, "1"},
{TOKEN_KEYWORD_BY, "BY"},
{TOKEN_INTEGER, "1"},
{TOKEN_UNTIL, "UNTIL"},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_GREATER_THAN, ">"},
{TOKEN_IDENT, "WS-SORT-MAX"},
{TOKEN_DISPLAY, "DISPLAY"},
{TOKEN_STRING, "\"Element \""},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_STRING, "\": \""},
{TOKEN_IDENT, "WS-SORT-ROW"},
{TOKEN_LEFT_PARENTHESIS, "("},
{TOKEN_IDENT, "WS-INDEX"},
{TOKEN_RIGHT_PARENTHESIS, ")"},
{TOKEN_END_PERFORM, "END-PERFORM"},
{TOKEN_DOT, "."},
{TOKEN_STOP, "STOP"},
{TOKEN_RUN, "RUN"},
{TOKEN_DOT, "."},
{TOKEN_EOF, ""},
};
yyin = fopen("samples/sorting-snippet.cbl", "r");
ASSERT_TRUE(yyin);
int index = 0;
token_t t;
do {
// Fetch the next token first so t and yytext refer to the current token
t = yylex();
printf("index: %d token: %d text: %s\n", index, t, yytext);
ASSERT_EQ(tokens[index].t, t);
ASSERT_STREQ(tokens[index].p, yytext);
++index;
} while (t != TOKEN_EOF);
}
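
For reference, the table-driven comparison above assumes an expected-token record along these lines; the .t and .p field names are taken from the assertion loop at the end of the test, and the actual struct token_st definition (not shown in this diff) may differ:

// Sketch of the fixture record assumed by the tokens[] table; the real
// struct token_st lives elsewhere in the test sources and may differ.
struct token_st {
  token_t t;     // token kind expected from yylex()
  const char *p; // text expected in yytext
};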

View File

@@ -14,7 +14,6 @@ extern int yylineno;
UTEST_MAIN();
UTEST(parser, math) {
// Must include the null character to terminate input
char string[] = "1+8/4-3;\0";
@@ -83,3 +82,56 @@ UTEST(parser, branching) {
// Assert the result to test correctness
ASSERT_EQ(result, 0);
}
UTEST(parser, looping) {
char string[] = "PERFORM VARYING I FROM 1 BY 1 UNTIL I > 10 MOVE I TO A(I)\0";
YY_BUFFER_STATE buffer = yy_scan_buffer(string, sizeof(string));
yylineno = 1;
int result = yyparse();
yy_delete_buffer(buffer);
// Assert the result to test correctness
ASSERT_EQ(result, 0);
}
UTEST(parser, sorting) {
// Read sample file as input
yyin = fopen("samples/sorting-snippet.cbl", "r");
yyrestart(yyin);
ASSERT_TRUE(yyin);
yylineno = 1;
int result = yyparse();
// Assert the result to test correctness
ASSERT_EQ(result, 0);
}
UTEST(parser, quadratic) {
// Read sample file as input
yyin = fopen("samples/quadratic-snippet.cbl", "r");
yyrestart(yyin);
ASSERT_TRUE(yyin);
yylineno = 1;
int result = yyparse();
// Assert the result to test correctness
ASSERT_EQ(result, 0);
}
UTEST(parser, boolean) {
// Must include the null character to terminate input
char string[] = "IF A > B THEN Var = TRUE ELSE Var = FALSE\0";
YY_BUFFER_STATE buffer = yy_scan_buffer(string, sizeof(string));
yylineno = 1;
int result = yyparse();
yy_delete_buffer(buffer);
// Assert the result to test correctness
ASSERT_EQ(result, 0);
}
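
One note on the in-memory parser cases above: flex's yy_scan_buffer() requires the last two bytes of the buffer to be NUL (YY_END_OF_BUFFER_CHAR), which is why each test string appends an explicit \0 and the call passes sizeof(string) so both terminators are counted. A minimal sketch of that convention, with a purely illustrative input string:

char input[] = "1+2;\0"; // explicit NUL plus the implicit string terminator
YY_BUFFER_STATE buffer = yy_scan_buffer(input, sizeof(input)); // sizeof counts both NULs
yylineno = 1;
int result = yyparse();
yy_delete_buffer(buffer);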

View File

@@ -1,9 +1,12 @@
# The top level rule indicates how to link everything together into calc
# The top level rule indicates how to link everything together into main
main: main.o symbol_map.o expr.o scanner.o parser.o
gcc main.o symbol_map.o expr.o scanner.o parser.o -o interpreter.out -lm
test: main_test.o symbol_map.o expr.o scanner.o parser.o
gcc main_test.o symbol_map.o expr.o scanner.o parser.o -o interpreter_test.out -lm
# This pattern indicates that any .o file depends
# upon the .c file of the same name, and all of the .h files.
# So, if a .o file is needed, it is built automatically.
@@ -22,5 +25,5 @@ parser.c parser.h: parser.bison
# clean causes all intermediate files to be deleted.
clean:
rm -f parser.c parser.output parser.h scanner.c *.o interpreter.out
rm -f parser.c parser.output parser.h scanner.c *.o interpreter.out interpreter_test.out test_evaluate.txt test_print.txt
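
With these rules, make test links main_test.o with the same objects as the interpreter into interpreter_test.out, the binary that the new lab_5_tests CI job runs, and clean now also removes that test binary along with the test_print.txt and test_evaluate.txt capture files written by main_test.c.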

lab-5/main_test.c (new file)
View File

@@ -0,0 +1,161 @@
// https://github.com/sheredom/utest.h/blob/master/utest.h
#include "utest.h"
#include "expr.h"
#include <stdio.h>
#include <stdlib.h> /* exit(), EXIT_FAILURE */
#include <unistd.h> /* dup(), dup2(), close(), STDOUT_FILENO */
/* Clunky: Declare the parse function generated from parser.bison */
extern int yyparse();
/* Clunky: Declare the result of the parser from parser.bison */
extern struct stmt *parser_result;
typedef struct yy_buffer_state *YY_BUFFER_STATE;
extern int yyrestart();
extern YY_BUFFER_STATE yy_scan_buffer(char *str, int i);
extern YY_BUFFER_STATE yy_scan_string(char *str);
extern void yy_delete_buffer(YY_BUFFER_STATE buffer);
extern FILE *yyin;
extern int yylineno;
UTEST_MAIN();
// Read the expected output from filename into expected_output, reading at
// most size - 1 bytes so the result can always be null-terminated
void read_file(const char *filename, char *expected_output, size_t size) {
FILE *expected_file = fopen(filename, "r");
if (expected_file == NULL) {
perror("fopen");
exit(EXIT_FAILURE);
}
size_t n = fread(expected_output, 1, size - 1, expected_file);
expected_output[n] = '\0';
fclose(expected_file);
}
// Redirect stdout into filename, run either stmt_evaluate (evaluate != 0)
// or stmt_print on the parse result, then restore the original stdout
void redirect_stdout(const char *filename, int evaluate) {
FILE *temp_file = fopen(filename, "w");
if (temp_file == NULL) {
perror("fopen");
exit(EXIT_FAILURE);
}
int temp_fd = fileno(temp_file);
int stdout_fd = dup(STDOUT_FILENO);
dup2(temp_fd, STDOUT_FILENO);
// Perform the operation that generates output
if (evaluate != 0) {
stmt_evaluate(parser_result);
} else {
stmt_print(parser_result);
}
// Flush the captured output, restore stdout, and release the capture file
fflush(stdout);
dup2(stdout_fd, STDOUT_FILENO);
close(stdout_fd);
fclose(temp_file);
}
UTEST(interpreter, print) {
yyin = fopen("samples/multiple_statements.c", "r");
yyrestart(yyin);
ASSERT_TRUE(yyin);
yylineno = 1;
int result = yyparse();
if (result == 0) {
// Catch the standard output and compare with expected test result
redirect_stdout("test_print.txt", 0);
redirect_stdout("test_evaluate.txt", 1);
}
// Assert the result to test correctness
ASSERT_EQ(result, 0);
char actual_print[1024];
read_file("test_print.txt", actual_print, sizeof(actual_print));
char expected_print[1024];
read_file("samples/multiple_statements_print.txt", expected_print, sizeof(expected_print));
ASSERT_STREQ(actual_print, expected_print);
char actual_evaluate[1024];
read_file("test_evaluate.txt", actual_evaluate, sizeof(actual_evaluate));
char expected_evaluate[1024];
read_file("samples/multiple_statements_evaluate.txt", expected_evaluate, sizeof(expected_evaluate));
ASSERT_STREQ(actual_evaluate, expected_evaluate);
}
UTEST(interpreter, program) {
yyin = fopen("samples/program.c", "r");
yyrestart(yyin);
ASSERT_TRUE(yyin);
yylineno = 1;
int result = yyparse();
if (result == 0) {
// Catch the standard output and compare with expected test result
redirect_stdout("test_print.txt", 0);
redirect_stdout("test_evaluate.txt", 1);
}
// Assert the result to test correctness
ASSERT_EQ(result, 0);
char actual_print[1024];
read_file("test_print.txt", actual_print, sizeof(actual_print));
char expected_print[1024];
read_file("samples/program_print.txt", expected_print, sizeof(expected_print));
ASSERT_STREQ(actual_print, expected_print);
char actual_evaluate[1024];
read_file("test_evaluate.txt", actual_evaluate, sizeof(actual_evaluate));
char expected_evaluate[1024];
read_file("samples/program_evaluate.txt", expected_evaluate, sizeof(expected_evaluate));
ASSERT_STREQ(actual_evaluate, expected_evaluate);
}
// UTEST(parser, missing_new_line) {
// // Must include the null character to terminate input
// char string[] = "1+8/4-3\0";
// YY_BUFFER_STATE buffer = yy_scan_buffer(string, sizeof(string));
// yylineno = 1;
// int result = yyparse();
// yy_delete_buffer(buffer);
// // Assert the result to test correctness
// ASSERT_EQ(result, 1);
// }
// UTEST(parser, hello_world) {
// // Read sample file as input
// yyin = fopen("samples/hello.py", "r");
// yyrestart(yyin);
// ASSERT_TRUE(yyin);
// yylineno = 1;
// int result = yyparse();
// // Assert the result to test correctness
// ASSERT_EQ(result, 0);
// }
// UTEST(parser, quadratic) {
// // Read sample file as input
// yyin = fopen("samples/quadratic.py", "r");
// yyrestart(yyin);
// ASSERT_TRUE(yyin);
// yylineno = 1;
// int result = yyparse();
// // Assert the result to test correctness
// ASSERT_EQ(result, 0);
// }
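
New sample-driven interpreter cases follow the same shape as the interpreter print and program tests above. A hypothetical additional case is sketched below; the samples/example.c and samples/example_print.txt paths are placeholders, not files added by this commit:

// Hypothetical extra case following the pattern above; the sample and
// expected-output paths are placeholders rather than files in this commit.
UTEST(interpreter, example) {
  yyin = fopen("samples/example.c", "r");
  yyrestart(yyin);
  ASSERT_TRUE(yyin);
  yylineno = 1;
  int result = yyparse();
  if (result == 0) {
    // Capture the printed AST, then compare it with the expected output
    redirect_stdout("test_print.txt", 0);
  }
  ASSERT_EQ(result, 0);
  char actual_print[1024];
  read_file("test_print.txt", actual_print, sizeof(actual_print));
  char expected_print[1024];
  read_file("samples/example_print.txt", expected_print, sizeof(expected_print));
  ASSERT_STREQ(actual_print, expected_print);
}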

View File

@@ -0,0 +1 @@
56

View File

@@ -0,0 +1,2 @@
print 5;
print 6;

View File

@@ -0,0 +1 @@
2024

View File

@@ -0,0 +1,2 @@
a = ((101*20)+4);
print(a);
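
For reference, this last sample evaluates to (101 * 20) + 4 = 2020 + 4 = 2024, which matches the single-line 2024 expected-output sample added earlier in this commit.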

lab-5/utest.h (new file, 1668 lines)
File diff suppressed because it is too large.