From: Marc G. Fournier
Date: Sat, 26 Apr 1997 05:45:48 +0000 (+0000)
Subject: Clean out/up some files that are causing me great headaches since I didn't
X-Git-Tag: REL6_1~251
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=ba1a58919c47d80f6a2e2e5c1e5708427a248633;p=postgresql

Clean out/up some files that are causing me great headaches since I didn't
do this completely last time and Thomas is creating patches on files that
aren't supposed to exist :(
---

diff --git a/src/test/regress/GNUmakefile b/src/test/regress/GNUmakefile
index a1c73755b1..44d179eea4 100644
--- a/src/test/regress/GNUmakefile
+++ b/src/test/regress/GNUmakefile
@@ -7,14 +7,14 @@
 #
 #
 # IDENTIFICATION
-#    $Header: /cvsroot/pgsql/src/test/regress/GNUmakefile,v 1.4 1997/04/12 09:34:31 scrappy Exp $
+#    $Header: /cvsroot/pgsql/src/test/regress/GNUmakefile,v 1.5 1997/04/26 05:44:06 scrappy Exp $
 #
 #-------------------------------------------------------------------------

 SRCDIR= ../..
 include ../../Makefile.global

-CFLAGS+= -I$(LIBPQDIR) -I../../include
+CFLAGS+= -I$(LIBPQDIR)

 LDADD+= -L$(LIBPQDIR) -lpq

@@ -37,59 +37,21 @@ ifdef EXPSUFF
 INFILES+= $(DLOBJS:.o=$(EXPSUFF))
 endif

-# OUTFILES is the files that get created by running the regression test.
-OUTFILES= stud_emp.data onek.data regress.out aportal.out
-
 #
 # prepare to run the test (including clean-up after the last run)
 #
 all: $(INFILES)
 	cd input; gmake all; cd ..
 	cd output; gmake all; cd ..
-	rm -f $(OUTFILES)

 #
 # run the test
 #
-runtest: $(INFILES) expected.out
+runtest: $(INFILES)
 	$(SHELL) ./regress.sh 2>&1 | tee regress.out
 	@echo "ACTUAL RESULTS OF REGRESSION TEST ARE NOW IN FILE regress.out"

-# The expected.input file is part of the distribution. It was made by hand
-# from 'regress.out' from a reference run of the regression test, replacing
-# installation-dependent things with names like _CWD_. The following rule
-# turns those names back into real values for the instant installation to
-# create a standard (expected.out) against which to compare regress.out
-# from the experimental run.
-#
-#
-expected.out: expected.input
-	if [ -z "$$USER" ]; then USER=$$LOGNAME; fi; \
-	if [ -z "$$USER" ]; then USER=`whoami`; fi; \
-	if [ -z "$$USER" ]; then echo 'Cannot deduce $USER.'; exit 1; fi; \
-	rm -f expected.out; \
-	MYTZ=`date | cut -c21`; \
-	C="`pwd`"; \
-	sed -e "s:_CWD_:$$C:g" \
-	    -e "s:_OBJWD_:$$C:g" \
-	    -e "s:_DLSUFFIX_:$(DLSUFFIX):g" \
-	    -e "s;\([A-Z][a-z][a-z][^ ]* [A-Z][a-z][a-z] [0-9 ][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9] [0-9][0-9][0-9][0-9] \)[A-Z]\([A-Z][A-Z]\);\1$$MYTZ\2;g" \
-	    -e "s;\([A-Z][a-z][a-z][^ ]* [A-Z][a-z][a-z] [0-9 ][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9] \)[A-Z]\([A-Z][A-Z] [0-9][0-9][0-9][0-9]\);\1$$MYTZ\2;g" \
-	    -e "s:_USER_:$$USER:g" < expected.input > expected.out
-	@echo "YOUR EXPECTED RESULTS ARE NOW IN FILE expected.out."
-
-%.sql: %.source
-	if [ -z "$$USER" ]; then USER=$$LOGNAME; fi; \
-	if [ -z "$$USER" ]; then USER=`whoami`; fi; \
-	if [ -z "$$USER" ]; then echo 'Cannot deduce $$USER.'; exit 1; fi; \
-	rm -f $@; \
-	C=`pwd`; \
-	sed -e "s:_CWD_:$$C:g" \
-	    -e "s:_OBJWD_:$$C:g" \
-	    -e "s:_DLSUFFIX_:$(DLSUFFIX):g" \
-	    -e "s/_USER_/$$USER/g" < $< > $@
-
 clean:
 	rm -f $(INFILES)
-	rm -f $(OUTFILES)
-
+	$(MAKE) -C sql clean
+	$(MAKE) -C expected clean
diff --git a/src/test/regress/expected/Makefile b/src/test/regress/expected/Makefile
new file mode 100644
index 0000000000..1ec0111bf8
--- /dev/null
+++ b/src/test/regress/expected/Makefile
@@ -0,0 +1,17 @@
+#-------------------------------------------------------------------------
+#
+# Makefile--
+#    Makefile for regress (the regression test)
+#
+# Copyright (c) 1994, Regents of the University of California
+#
+#
+# IDENTIFICATION
+#    $Header: /cvsroot/pgsql/src/test/regress/expected/Attic/Makefile,v 1.1 1997/04/26 05:44:17 scrappy Exp $
+#
+#-------------------------------------------------------------------------
+
+CLFILES= create_function_1.out create_function_2.out copy.out
+
+clean:
+	rm -f $(CLFILES)
diff --git a/src/test/regress/input/Makefile b/src/test/regress/input/Makefile
index 4cabda6555..cb9f89c1ef 100644
--- a/src/test/regress/input/Makefile
+++ b/src/test/regress/input/Makefile
@@ -7,7 +7,7 @@
 #
 #
 # IDENTIFICATION
-#    $Header: /cvsroot/pgsql/src/test/regress/input/Attic/Makefile,v 1.2 1997/04/06 08:28:33 scrappy Exp $
+#    $Header: /cvsroot/pgsql/src/test/regress/input/Attic/Makefile,v 1.3 1997/04/26 05:44:38 scrappy Exp $
 #
 #-------------------------------------------------------------------------

@@ -29,8 +29,9 @@ all: $(INFILES)
 	if [ -z "$$USER" ]; then USER=`whoami`; fi; \
 	if [ -z "$$USER" ]; then echo 'Cannot deduce $$USER.'; exit 1; fi; \
 	rm -f $@; \
-	C=`pwd`; \
+	PWD=`pwd`; \
+	OBJ=`pwd`; \
 	sed -e "s:_CWD_:$(PWD):g" \
-	    -e "s:_OBJWD_:$(PWD):g" \
+	    -e "s:_OBJWD_:$$OBJ/\.\.:g" \
 	    -e "s:_DLSUFFIX_:$(DLSUFFIX):g" \
 	    -e "s/_USER_/$$USER/g" < $< > ../sql/$@
diff --git a/src/test/regress/input/copy.source b/src/test/regress/input/copy.source
index ad2cadd454..cf8e06cd34 100644
--- a/src/test/regress/input/copy.source
+++ b/src/test/regress/input/copy.source
@@ -6,43 +6,43 @@
 -- CLASS POPULATION
 --	(any resemblance to real life is purely coincidental)
 --
-COPY onek FROM '_CWD_/data/onek.data';
+COPY onek FROM '_CWD_/../data/onek.data';

-COPY tenk1 FROM '_CWD_/data/tenk.data';
+COPY tenk1 FROM '_CWD_/../data/tenk.data';

-COPY slow_emp4000 FROM '_CWD_/data/rect.data';
+COPY slow_emp4000 FROM '_CWD_/../data/rect.data';

-COPY person FROM '_CWD_/data/person.data';
+COPY person FROM '_CWD_/../data/person.data';

-COPY emp FROM '_CWD_/data/emp.data';
+COPY emp FROM '_CWD_/../data/emp.data';

-COPY student FROM '_CWD_/data/student.data';
+COPY student FROM '_CWD_/../data/student.data';

-COPY stud_emp FROM '_CWD_/data/stud_emp.data';
+COPY stud_emp FROM '_CWD_/../data/stud_emp.data';

-COPY road FROM '_CWD_/data/streets.data';
+COPY road FROM '_CWD_/../data/streets.data';

-COPY real_city FROM '_CWD_/data/real_city.data';
+COPY real_city FROM '_CWD_/../data/real_city.data';

-COPY hash_i4_heap FROM '_CWD_/data/hash.data';
+COPY hash_i4_heap FROM '_CWD_/../data/hash.data';

-COPY hash_c16_heap FROM '_CWD_/data/hash.data';
+COPY hash_c16_heap FROM '_CWD_/../data/hash.data';

-COPY hash_txt_heap FROM '_CWD_/data/hash.data';
+COPY hash_txt_heap FROM '_CWD_/../data/hash.data';

-COPY hash_f8_heap FROM '_CWD_/data/hash.data';
+COPY hash_f8_heap FROM '_CWD_/../data/hash.data';

 -- the data in this file has a lot of duplicates in the index key
 -- fields, leading to long bucket chains and lots of table expansion.
 -- this is therefore a stress test of the bucket overflow code (unlike
 -- the data in hash.data, which has unique index keys).
 --
--- COPY hash_ovfl_heap FROM '_CWD_/data/hashovfl.data';
+-- COPY hash_ovfl_heap FROM '_CWD_/../data/hashovfl.data';

-COPY bt_i4_heap FROM '_CWD_/data/desc.data';
+COPY bt_i4_heap FROM '_CWD_/../data/desc.data';

-COPY bt_c16_heap FROM '_CWD_/data/hash.data';
+COPY bt_c16_heap FROM '_CWD_/../data/hash.data';

-COPY bt_txt_heap FROM '_CWD_/data/desc.data';
+COPY bt_txt_heap FROM '_CWD_/../data/desc.data';

-COPY bt_f8_heap FROM '_CWD_/data/hash.data';
+COPY bt_f8_heap FROM '_CWD_/../data/hash.data';
diff --git a/src/test/regress/output/Makefile b/src/test/regress/output/Makefile
index 9ec6d377a9..3e8bb77463 100644
--- a/src/test/regress/output/Makefile
+++ b/src/test/regress/output/Makefile
@@ -7,7 +7,7 @@
 #
 #
 # IDENTIFICATION
-#    $Header: /cvsroot/pgsql/src/test/regress/output/Attic/Makefile,v 1.2 1997/04/06 08:28:57 scrappy Exp $
+#    $Header: /cvsroot/pgsql/src/test/regress/output/Attic/Makefile,v 1.3 1997/04/26 05:45:13 scrappy Exp $
 #
 #-------------------------------------------------------------------------

@@ -27,14 +27,16 @@ all: $(INFILES)
 %.out: %.source
 	if [ -z "$$USER" ]; then USER=$$LOGNAME; fi; \
 	if [ -z "$$USER" ]; then USER=`whoami`; fi; \
-	if [ -z "$$USER" ]; then echo 'Cannot deduce $USER.'; exit 1; fi; \
+	if [ -z "$$USER" ]; then echo 'Cannot deduce $$USER.'; exit 1; fi; \
 	rm -f expected.out; \
 	MYTZ=`date | cut -c21`; \
 	C="`pwd`"; \
-	sed -e "s:_CWD_:$(PWD):g" \
+	sed -e "s:_CWD_:$(PWD)/\.\.:g" \
+	    -e "s:\.\./output:\.\./input:g" \
 	    -e "s:_OBJWD_:$(PWD):g" \
 	    -e "s:_DLSUFFIX_:$(DLSUFFIX):g" \
 	    -e "s;\([A-Z][a-z][a-z][^ ]* [A-Z][a-z][a-z] [0-9 ][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9] [0-9][0-9][0-9][0-9] \)[A-Z]\([A-Z][A-Z]\);\1$$MYTZ\2;g" \
 	    -e "s;\([A-Z][a-z][a-z][^ ]* [A-Z][a-z][a-z] [0-9 ][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9] \)[A-Z]\([A-Z][A-Z] [0-9][0-9][0-9][0-9]\);\1$$MYTZ\2;g" \
-	    -e "s:_USER_:$$USER:g" < $< > ../expected/$@
+	    -e "s:_USER_:$$USER:g" < $< | \
+	    sed -e "s:/output/\.\./:/input/\.\./:g"> ../expected/$@
 	@echo "YOUR EXPECTED RESULTS ARE NOW IN FILE expected.out."
diff --git a/src/test/regress/output/create_function.source b/src/test/regress/output/create_function.source
deleted file mode 100644
index 2ef66b8029..0000000000
--- a/src/test/regress/output/create_function.source
+++ /dev/null
@@ -1,49 +0,0 @@
-QUERY: CREATE FUNCTION circle_in(opaque)
-   RETURNS circle
-   AS '_CWD_/regress_DLSUFFIX_'
-   LANGUAGE 'c';
-NOTICE:ProcedureCreate: type 'circle' is not yet defined
-QUERY: CREATE FUNCTION circle_out(opaque)
-   RETURNS opaque
-   AS '_CWD_/regress_DLSUFFIX_'
-   LANGUAGE 'c';
-QUERY: CREATE FUNCTION hobbies(person)
-   RETURNS setof hobbies_r
-   AS 'select * from hobbies_r where person = $1.name'
-   LANGUAGE 'sql';
-QUERY: CREATE FUNCTION hobby_construct(text, text)
-   RETURNS hobbies_r
-   AS 'select $1 as name, $2 as hobby'
-   LANGUAGE 'sql';
-QUERY: CREATE FUNCTION equipment(hobbies_r)
-   RETURNS setof equipment_r
-   AS 'select * from equipment_r where hobby = $1.name'
-   LANGUAGE 'sql';
-QUERY: CREATE FUNCTION user_relns()
-   RETURNS setof name
-   AS 'select relname
-      from pg_class
-      where relname !~ ''pg_.*'' and
-            relkind <> ''i'' '
-   LANGUAGE 'sql';
-QUERY: CREATE FUNCTION pt_in_circle(point, circle)
-   RETURNS int4
-   AS '_CWD_/regress_DLSUFFIX_'
-   LANGUAGE 'c';
-QUERY: CREATE FUNCTION overpaid(emp)
-   RETURNS bool
-   AS '_CWD_/regress_DLSUFFIX_'
-   LANGUAGE 'c';
-QUERY: CREATE FUNCTION boxarea(box)
-   RETURNS int4
-   AS '_CWD_/regress_DLSUFFIX_'
-   LANGUAGE 'c';
-QUERY: CREATE FUNCTION interpt_pp(path, path)
-   RETURNS point
-   AS '_CWD_/regress_DLSUFFIX_'
-   LANGUAGE 'c';
-QUERY: CREATE FUNCTION reverse_c16(char16)
-   RETURNS char16
-   AS '_CWD_/regress_DLSUFFIX_'
-   LANGUAGE 'c';
-QUERY: LOAD '_CWD_/regress_DLSUFFIX_';
diff --git a/src/test/regress/output/create_misc.source b/src/test/regress/output/create_misc.source
deleted file mode 100644
index efe5f2b840..0000000000
--- a/src/test/regress/output/create_misc.source
+++ /dev/null
@@ -1,151 +0,0 @@
-QUERY: COPY onek FROM '_CWD_/data/onek.data';
-QUERY: COPY tenk1 FROM '_CWD_/data/tenk.data';
-QUERY: INSERT INTO tenk2 VALUES (tenk1.*);
-QUERY: SELECT * INTO TABLE onek2 FROM onek;
-QUERY: COPY slow_emp4000 FROM '_CWD_/data/rect.data';
-QUERY: INSERT INTO fast_emp4000 VALUES (slow_emp4000.*);
-QUERY: COPY person FROM '_CWD_/data/person.data';
-QUERY: COPY emp FROM '_CWD_/data/emp.data';
-QUERY: COPY student FROM '_CWD_/data/student.data';
-QUERY: COPY stud_emp FROM '_CWD_/data/stud_emp.data';
-QUERY: SELECT *
-   INTO TABLE Bprime
-   FROM tenk1
-   WHERE unique2 < 1000;
-QUERY: INSERT INTO hobbies_r (name, person)
-   SELECT 'posthacking', p.name
-   FROM person* p
-   WHERE p.name = 'mike' or p.name = 'jeff';
-QUERY: INSERT INTO hobbies_r (name, person)
-   SELECT 'basketball', p.name
-   FROM person p
-   WHERE p.name = 'joe' or p.name = 'sally';
-QUERY: INSERT INTO hobbies_r (name) VALUES ('skywalking');
-QUERY: INSERT INTO equipment_r (name, hobby) VALUES ('advil', 'posthacking');
-QUERY: INSERT INTO equipment_r (name, hobby) VALUES ('peet''s coffee', 'posthacking');
-QUERY: INSERT INTO equipment_r (name, hobby) VALUES ('hightops', 'basketball');
-QUERY: INSERT INTO equipment_r (name, hobby) VALUES ('guts', 'skywalking');
-QUERY: COPY road FROM '_CWD_/data/streets.data';
-QUERY: COPY real_city FROM '_CWD_/data/real_city.data';
-QUERY: SELECT *
-   INTO TABLE ramp
-   FROM road
-   WHERE name ~ '.*Ramp';
-QUERY: INSERT INTO ihighway
-   SELECT *
-   FROM road
-   WHERE name ~ 'I- .*';
-QUERY: INSERT INTO shighway
-   SELECT *
-   FROM road
-   WHERE name ~ 'State Hwy.*';
-QUERY: UPDATE shighway
-   SET surface = 'asphalt';
-QUERY: INSERT INTO a_star (class, a) VALUES ('a', 1);
-QUERY: INSERT INTO a_star (class, a) VALUES ('a', 2);
-QUERY: INSERT INTO a_star (class) VALUES ('a');
-QUERY: INSERT INTO b_star (class, a, b) VALUES ('b', 3, 'mumble'::text);
-QUERY: INSERT INTO b_star (class, a) VALUES ('b', 4);
-QUERY: INSERT INTO b_star (class, b) VALUES ('b', 'bumble'::text);
-QUERY: INSERT INTO b_star (class) VALUES ('b');
-QUERY: INSERT INTO c_star (class, a, c) VALUES ('c', 5, 'hi mom'::char16);
-QUERY: INSERT INTO c_star (class, a) VALUES ('c', 6);
-QUERY: INSERT INTO c_star (class, c) VALUES ('c', 'hi paul'::char16);
-QUERY: INSERT INTO c_star (class) VALUES ('c');
-QUERY: INSERT INTO d_star (class, a, b, c, d)
-   VALUES ('d', 7, 'grumble'::text, 'hi sunita'::char16, '0.0'::float8);
-QUERY: INSERT INTO d_star (class, a, b, c)
-   VALUES ('d', 8, 'stumble'::text, 'hi koko'::char16);
-QUERY: INSERT INTO d_star (class, a, b, d)
-   VALUES ('d', 9, 'rumble'::text, '1.1'::float8);
-QUERY: INSERT INTO d_star (class, a, c, d)
-   VALUES ('d', 10, 'hi kristin'::char16, '10.01'::float8);
-QUERY: INSERT INTO d_star (class, b, c, d)
-   VALUES ('d', 'crumble'::text, 'hi boris'::char16, '100.001'::float8);
-QUERY: INSERT INTO d_star (class, a, b)
-   VALUES ('d', 11, 'fumble'::text);
-QUERY: INSERT INTO d_star (class, a, c)
-   VALUES ('d', 12, 'hi avi'::char16);
-QUERY: INSERT INTO d_star (class, a, d)
-   VALUES ('d', 13, '1000.0001'::float8);
-QUERY: INSERT INTO d_star (class, b, c)
-   VALUES ('d', 'tumble'::text, 'hi andrew'::char16);
-QUERY: INSERT INTO d_star (class, b, d)
-   VALUES ('d', 'humble'::text, '10000.00001'::float8);
-QUERY: INSERT INTO d_star (class, c, d)
-   VALUES ('d', 'hi ginger'::char16, '100000.000001'::float8);
-QUERY: INSERT INTO d_star (class, a) VALUES ('d', 14);
-QUERY: INSERT INTO d_star (class, b) VALUES ('d', 'jumble'::text);
-QUERY: INSERT INTO d_star (class, c) VALUES ('d', 'hi jolly'::char16);
-QUERY: INSERT INTO d_star (class, d) VALUES ('d', '1000000.0000001'::float8);
-QUERY: INSERT INTO d_star (class) VALUES ('d');
-QUERY: INSERT INTO e_star (class, a, c, e)
-   VALUES ('e', 15, 'hi carol'::char16, '-1'::int2);
-QUERY: INSERT INTO e_star (class, a, c)
-   VALUES ('e', 16, 'hi bob'::char16);
-QUERY: INSERT INTO e_star (class, a, e)
-   VALUES ('e', 17, '-2'::int2);
-QUERY: INSERT INTO e_star (class, c, e)
-   VALUES ('e', 'hi michelle'::char16, '-3'::int2);
-QUERY: INSERT INTO e_star (class, a)
-   VALUES ('e', 18);
-QUERY: INSERT INTO e_star (class, c)
-   VALUES ('e', 'hi elisa'::char16);
-QUERY: INSERT INTO e_star (class, e)
-   VALUES ('e', '-4'::int2);
-QUERY: INSERT INTO f_star (class, a, c, e, f)
-   VALUES ('f', 19, 'hi claire'::char16, '-5'::int2, '(1,2,3,4)'::polygon);
-QUERY: INSERT INTO f_star (class, a, c, e)
-   VALUES ('f', 20, 'hi mike'::char16, '-6'::int2);
-QUERY: INSERT INTO f_star (class, a, c, f)
-   VALUES ('f', 21, 'hi marcel'::char16, '(11,22,33,44,55,66)'::polygon);
-QUERY: INSERT INTO f_star (class, a, e, f)
-   VALUES ('f', 22, '-7'::int2, '(111,222,333,444,555,666,777,888)'::polygon);
-QUERY: INSERT INTO f_star (class, c, e, f)
-   VALUES ('f', 'hi keith'::char16, '-8'::int2,
-          '(1111,2222,3333,4444)'::polygon);
-QUERY: INSERT INTO f_star (class, a, c)
-   VALUES ('f', 24, 'hi marc'::char16);
-QUERY: INSERT INTO f_star (class, a, e)
-   VALUES ('f', 25, '-9'::int2);
-QUERY: INSERT INTO f_star (class, a, f)
-   VALUES ('f', 26, '(11111,22222,33333,44444)'::polygon);
-QUERY: INSERT INTO f_star (class, c, e)
-   VALUES ('f', 'hi allison'::char16, '-10'::int2);
-QUERY: INSERT INTO f_star (class, c, f)
-   VALUES ('f', 'hi jeff'::char16,
-          '(111111,222222,333333,444444)'::polygon);
-QUERY: INSERT INTO f_star (class, e, f)
-   VALUES ('f', '-11'::int2, '(1111111,2222222,3333333,4444444)'::polygon);
-QUERY: INSERT INTO f_star (class, a) VALUES ('f', 27);
-QUERY: INSERT INTO f_star (class, c) VALUES ('f', 'hi carl'::char16);
-QUERY: INSERT INTO f_star (class, e) VALUES ('f', '-12'::int2);
-QUERY: INSERT INTO f_star (class, f)
-   VALUES ('f', '(11111111,22222222,33333333,44444444)'::polygon);
-QUERY: INSERT INTO f_star (class) VALUES ('f');
-QUERY: COPY hash_i4_heap FROM '_CWD_/data/hash.data';
-QUERY: COPY hash_c16_heap FROM '_CWD_/data/hash.data';
-QUERY: COPY hash_txt_heap FROM '_CWD_/data/hash.data';
-QUERY: COPY hash_f8_heap FROM '_CWD_/data/hash.data';
-QUERY: COPY bt_i4_heap FROM '_CWD_/data/desc.data';
-QUERY: COPY bt_c16_heap FROM '_CWD_/data/hash.data';
-QUERY: COPY bt_txt_heap FROM '_CWD_/data/desc.data';
-QUERY: COPY bt_f8_heap FROM '_CWD_/data/hash.data';
-QUERY: INSERT INTO arrtest (a[5], b[2][1][2], c, d)
-   VALUES ('{1,2,3,4,5}', '{{{},{1,2}}}', '{}', '{}');
-QUERY: UPDATE arrtest SET e[0] = '1.1';
-QUERY: UPDATE arrtest SET e[1] = '2.2';
-QUERY: INSERT INTO arrtest (a, b[2][2][1], c, d, e)
-   VALUES ('{11,12,23}', '{{3,4},{4,5}}', '{"foobar"}',
-          '{{"elt1", "elt2"}}', '{"3.4", "6.7"}');
-QUERY: INSERT INTO arrtest (a, b[1][2][2], c, d[2][1])
-   VALUES ('{}', '{3,4}', '{foo,bar}', '{bar,foo}');
-QUERY: CREATE TABLE iportaltest (
-	i		int4,
-	d		float4,
-	p		polygon
-);
-QUERY: INSERT INTO iportaltest (i, d, p)
-   VALUES (1, 3.567, '(3.0,4.0,1.0,2.0)'::polygon);
-QUERY: INSERT INTO iportaltest (i, d, p)
-   VALUES (2, 89.05, '(4.0,3.0,2.0,1.0)'::polygon);
diff --git a/src/test/regress/sql/Makefile b/src/test/regress/sql/Makefile
new file mode 100644
index 0000000000..3e77f986b5
--- /dev/null
+++ b/src/test/regress/sql/Makefile
@@ -0,0 +1,17 @@
+#-------------------------------------------------------------------------
+#
+# Makefile--
+#    Makefile for regress (the regression test)
+#
+# Copyright (c) 1994, Regents of the University of California
+#
+#
+# IDENTIFICATION
+#    $Header: /cvsroot/pgsql/src/test/regress/sql/Attic/Makefile,v 1.1 1997/04/26 05:45:48 scrappy Exp $
+#
+#-------------------------------------------------------------------------
+
+CLFILES= create_function_1.sql create_function_2.sql copy.sql
+
+clean:
+	rm -f $(CLFILES)
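
Note on the substitution scheme these Makefiles share: both the input/ and output/
rules generate real test files from checked-in .source templates by sed-replacing the
_CWD_, _OBJWD_, _DLSUFFIX_ and _USER_ placeholders with values for the local
installation. The standalone shell sketch below only illustrates that mechanism; the
copy.source/copy.sql file names and the ".so" suffix are assumptions for the example,
not part of this commit.

    #!/bin/sh
    # Sketch of the placeholder substitution done by the regress Makefiles.
    # Assumes a template named copy.source in the current directory.
    USER="${USER:-$LOGNAME}"
    [ -z "$USER" ] && USER=`whoami`
    if [ -z "$USER" ]; then
        echo 'Cannot deduce $USER.' >&2
        exit 1
    fi
    CWD=`pwd`            # stands in for the regress source/object directory
    DLSUFFIX=".so"       # platform shared-library suffix (assumed for the example)

    # copy.source -> copy.sql, with placeholders resolved for this installation
    sed -e "s:_CWD_:$CWD:g" \
        -e "s:_OBJWD_:$CWD:g" \
        -e "s:_DLSUFFIX_:$DLSUFFIX:g" \
        -e "s/_USER_/$USER/g" < copy.source > copy.sql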