-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathtest.java
More file actions
270 lines (216 loc) · 11.1 KB
/
test.java
File metadata and controls
270 lines (216 loc) · 11.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
/**
 * Inserts one row into the named table, keeping every index in sync.
 *
 * Flow: (1) validate that the table exists and that every supplied column
 * matches the metadata CSV in name and type; (2) locate the target page —
 * via the clustering-key B+ tree when one exists, otherwise by binary
 * search over the pages — and insert the tuple; (3) add the page name to
 * every index that covers an inserted column, then serialize the indexes.
 *
 * @param strTableName     name of the target table (must already be serialized)
 * @param htblColNameValue column name -&gt; value; must supply a value for
 *                         every column of the table
 * @throws DBAppException if the table does not exist, the value count is
 *         wrong, a column is unknown, a value's type does not match the
 *         metadata, or any (de)serialization step fails
 */
public void insertIntoTable(String strTableName,
        Hashtable<String,Object> htblColNameValue) throws DBAppException{
    Table t;
    if(!Serializer.wasSerialized(strTableName)){
        throw new DBAppException("Table does not exist");
    }
    try {
        t = (Table) Serializer.deSerialize(strTableName);
    } catch (Exception e) {
        throw new DBAppException("An error occured while deserializing the table");
    }
    Hashtable<String,String> hash = t.getIndexHash(); // column name -> index name
    Vector<Vector<String>> meta = getCSV();
    // Partial inserts are not supported: a value is required for every column.
    if(htblColNameValue.size()!=t.getColNameType().size()){
        throw new DBAppException("Number of values are not the same");
    }
    // Validate each supplied column against the metadata CSV
    // (row layout: [0]=table name, [1]=column name, [2]=column type).
    for (Map.Entry<String, Object> entry : htblColNameValue.entrySet()) {
        String columnName = entry.getKey();
        Object columnValue = entry.getValue();
        boolean found = false; // does this column exist in the table's metadata?
        for(int i=0;i<meta.size();i++){
            String tableNameMeta = meta.get(i).get(0);
            String columnNameMeta = meta.get(i).get(1);
            if(tableNameMeta.compareTo(strTableName)==0 && columnNameMeta.compareToIgnoreCase(columnName)==0){
                // At the metadata row for this table/column pair.
                found = true;
                String columnTypeMeta = meta.get(i).get(2);
                // BUG FIX: the original compared Strings with == / != (identity,
                // not content) and matched getSimpleName() against the wrong
                // names ("int"/"double" instead of "Integer"/"Double"), so the
                // type check could never reject a mismatched value. instanceof
                // is exact here because Integer, String and Double are final.
                if("java.lang.Integer".equals(columnTypeMeta) && !(columnValue instanceof Integer)){
                    throw new DBAppException("Incompatible Data Type");
                } else if("java.lang.String".equals(columnTypeMeta) && !(columnValue instanceof String)){
                    throw new DBAppException("Incompatible Data Type");
                } else if("java.lang.Double".equals(columnTypeMeta) && !(columnValue instanceof Double)){
                    throw new DBAppException("Incompatible Data Type");
                }
            }
        }
        // Column name absent from the metadata -> reject the whole insert.
        if(!found){
            throw new DBAppException(columnName + " does not exist in the table");
        }
    }
    Index index;
    // Copy the Hashtable into a HashMap because Tuple's constructor takes one.
    HashMap<String, Object> hashMap1 = new HashMap<>();
    for (Map.Entry<String, Object> entry : htblColNameValue.entrySet()) {
        hashMap1.put(entry.getKey(), entry.getValue());
    }
    Tuple tupletoinsert = new Tuple(hashMap1); // the row being inserted
    String primarykeyy = t.getClusteringKeyColumn();
    String thedesiredpagename = ""; // page that ends up holding the tuple
    if(!hash.containsKey(primarykeyy)){
        // No index on the clustering key -> binary search over the pages.
        Object [] binarySearchReturn = binarysearchkonato(t, tupletoinsert);
        // binarysearchkonato returns one of:
        //   1) a String              -> table is empty; this is the first tuple
        //   2) {-1, ...}             -> the tuple's PK is the smallest PK
        //   3) {tupleIdx, _, _, pageIdx} -> insert after tupleIdx on pageIdx
        Page firstPage;
        if (binarySearchReturn[0] instanceof String) {
            try{
                firstPage = t.createPage();
            }
            catch (Exception ex){
                throw new DBAppException("An error occured while creating the first page");
            }
            try{
                Page despage = (Page) Serializer.deSerialize(firstPage.fileName);
                thedesiredpagename = despage.fileName;
                despage.addTuple(tupletoinsert); // return value not needed here
                Serializer.serialize(despage, despage.fileName);
            }
            catch(Exception e){
                throw new DBAppException("An error occured while serializing or deserializing the page");
            }
        } else if (binarySearchReturn[0] instanceof Integer) {
            if((int)binarySearchReturn[0] == -1){
                // Smallest PK so far: insert at the very front (page 0, before tuple 0).
                thedesiredpagename = insertinpageskonato(t, tupletoinsert,0, -1);
            }
            else{
                int rakameltuple = (int) binarySearchReturn[0]; // tuple just before the insertion point
                int rakamelpage = (int) binarySearchReturn[3];  // page holding that tuple
                thedesiredpagename = insertinpageskonato(t, tupletoinsert, rakamelpage, rakameltuple);
            }
        }
    }
    else{
        // A clustering-key index exists -> use its B+ tree to find the page.
        String primarykey = t.getClusteringKeyColumn();
        String indexName = hash.get(primarykey);
        Index clusteringindex;
        try {
            clusteringindex = (Index) Serializer.deSerialize(indexName);
        } catch (Exception e) {
            throw new DBAppException("An error occured while deserializing the index");
        }
        bplustree bp = clusteringindex.getBTree();
        Object VTpInBt = tupletoinsert.getValue(primarykey); // clustering-key value of the new tuple
        Comparable comp = (Comparable) VTpInBt;
        thedesiredpagename = bplustree.helperforindexsearch(bp,comp);
        // Sentinel string from helperforindexsearch: key is beyond every
        // indexed key (right sibling null) -> the tuple belongs on the last page.
        if(thedesiredpagename.equals("we are going to insert our tuple in the last page if not full handled in DB APP")){
            try{
                String thelastpagename = t.getPageFileNames().get((t.getPageFileNames()).size() -1);
                int rakamAkherPage = t.getPageFileNames().size()-1;
                Page thelastpage = (Page) Serializer.deSerialize(thelastpagename);
                // NOTE(review): this serialize-right-after-deserialize writes the
                // page back unmodified; it looks redundant but is kept as-is.
                Serializer.serialize(thelastpage, thelastpage.fileName);
                // The page is known; still need the tuple's position within it.
                int tupleToInsertAfter = binarysearchkonato2(thelastpage, tupletoinsert, t);
                thedesiredpagename = insertinpageskonato(t,tupletoinsert,rakamAkherPage,tupleToInsertAfter);
            }
            catch(Exception ex){
                throw new DBAppException("An error occured while serializing or deserializing the page");
            }
        }
        else{
            // thedesiredpagename already names the target page; find its ordinal.
            // BUG FIX: the original compared the page-name Strings with !=
            // (identity), which can walk past the end of the list when the
            // stored name is an equal-but-distinct String instance.
            int rakamofthepage = 0;
            int i = 0;
            while(!t.getPageFileNames().get(i).equals(thedesiredpagename)){
                i = i + 1;
                rakamofthepage = rakamofthepage +1;
            }
            Page thedesiredpage;
            try{
                thedesiredpage = (Page) Serializer.deSerialize(thedesiredpagename);
                // NOTE(review): same apparently-redundant write-back as above; kept as-is.
                Serializer.serialize(thedesiredpage, thedesiredpage.fileName);
                int rakamofthetuple = binarysearchkonato2(thedesiredpage, tupletoinsert, t);
                // Overflow handling inside insertinpageskonato may move the tuple
                // elsewhere, but thedesiredpagename intentionally keeps the B+ tree result.
                insertinpageskonato(t,tupletoinsert,rakamofthepage,rakamofthetuple);
            }
            catch(Exception ex){
                throw new DBAppException("An error occured while serializing or deserializing the page");
            }
        }
        try{
            Serializer.serialize(clusteringindex, indexName);
        }
        catch(Exception ex){
            throw new DBAppException("An error occured while serializing the index: " + indexName);
        }
    }
    // Insertion into the pages is done (with or without the PK index).
    // Now register the new tuple's page in every index covering an inserted column.
    for (Map.Entry<String, String> entry : hash.entrySet()) {
        String columnName = entry.getKey();
        String indexName = entry.getValue();
        try {
            index = (Index) Serializer.deSerialize(indexName);
        } catch (Exception e) {
            throw new DBAppException("error deserializing: " + indexName);
        }
        bplustree tree = index.getBTree();
        Comparable o1;
        // Match this index's column against the inserted columns.
        for (Map.Entry<String, Object> entry1 : htblColNameValue.entrySet()) {
            String column = entry1.getKey();
            o1 = (Comparable)entry1.getValue();
            if(column.equals(columnName)){
                Vector<String> vectorofpages = tree.search(o1);
                if(vectorofpages == null){
                    // First occurrence of this key -> new posting list.
                    tree.insert(o1, thedesiredpagename);
                }
                else{
                    // Key already indexed -> append this page (duplicates allowed).
                    vectorofpages.add(thedesiredpagename);
                }
                break;
            }
        }
        try{
            Serializer.serialize(index, indexName);
        }
        catch(Exception e){
            throw new DBAppException("An error occured while serializing the index: " + indexName);
        }
    }
}