1 |
xmw 13/02/28 07:28:03 |
2 |
|
3 |
Added: hubbub-0.1.2-error.patch |
4 |
Log: |
5 |
netsurf eclass preparation. Fix enum comparison and unused vars. |
6 |
|
7 |
(Portage version: 2.2.0_alpha163/cvs/Linux x86_64, signed Manifest commit with key 62EEF090) |
8 |
|
9 |
Revision Changes Path |
10 |
1.1 net-libs/hubbub/files/hubbub-0.1.2-error.patch |
11 |
|
12 |
file : http://sources.gentoo.org/viewvc.cgi/gentoo-x86/net-libs/hubbub/files/hubbub-0.1.2-error.patch?rev=1.1&view=markup |
13 |
plain: http://sources.gentoo.org/viewvc.cgi/gentoo-x86/net-libs/hubbub/files/hubbub-0.1.2-error.patch?rev=1.1&content-type=text/plain |
14 |
|
15 |
Index: hubbub-0.1.2-error.patch |
16 |
=================================================================== |
17 |
--- hubbub-0.1.2/test/csdetect.c |
18 |
+++ hubbub-0.1.2/test/csdetect.c |
19 |
@@ -108,7 +108,7 @@ |
20 |
static int testnum; |
21 |
|
22 |
assert(hubbub_charset_extract(data, len, |
23 |
- &mibenum, &source) == HUBBUB_OK); |
24 |
+ &mibenum, &source) == (parserutils_error)HUBBUB_OK); |
25 |
|
26 |
assert(mibenum != 0); |
27 |
|
28 |
--- hubbub-0.1.2/test/parser.c |
29 |
+++ hubbub-0.1.2/test/parser.c |
30 |
@@ -24,7 +24,7 @@ |
31 |
hubbub_parser *parser; |
32 |
hubbub_parser_optparams params; |
33 |
FILE *fp; |
34 |
- size_t len, origlen; |
35 |
+ size_t len; |
36 |
uint8_t *buf = alloca(CHUNK_SIZE); |
37 |
const char *charset; |
38 |
hubbub_charset_source cssource; |
39 |
@@ -46,7 +46,7 @@ |
40 |
} |
41 |
|
42 |
fseek(fp, 0, SEEK_END); |
43 |
- origlen = len = ftell(fp); |
44 |
+ len = ftell(fp); |
45 |
fseek(fp, 0, SEEK_SET); |
46 |
|
47 |
while (len > 0) { |
48 |
--- hubbub-0.1.2/test/tokeniser.c |
49 |
+++ hubbub-0.1.2/test/tokeniser.c |
50 |
@@ -26,7 +26,7 @@ |
51 |
hubbub_tokeniser *tok; |
52 |
hubbub_tokeniser_optparams params; |
53 |
FILE *fp; |
54 |
- size_t len, origlen; |
55 |
+ size_t len; |
56 |
#define CHUNK_SIZE (4096) |
57 |
uint8_t buf[CHUNK_SIZE]; |
58 |
|
59 |
@@ -44,7 +44,7 @@ |
60 |
params.token_handler.handler = token_handler; |
61 |
params.token_handler.pw = NULL; |
62 |
assert(hubbub_tokeniser_setopt(tok, HUBBUB_TOKENISER_TOKEN_HANDLER, |
63 |
- ¶ms) == HUBBUB_OK); |
64 |
+ ¶ms) == (hubbub_error)HUBBUB_OK); |
65 |
|
66 |
fp = fopen(argv[1], "rb"); |
67 |
if (fp == NULL) { |
68 |
@@ -53,7 +53,7 @@ |
69 |
} |
70 |
|
71 |
fseek(fp, 0, SEEK_END); |
72 |
- origlen = len = ftell(fp); |
73 |
+ len = ftell(fp); |
74 |
fseek(fp, 0, SEEK_SET); |
75 |
|
76 |
while (len > 0) { |
77 |
@@ -63,7 +63,7 @@ |
78 |
break; |
79 |
|
80 |
assert(parserutils_inputstream_append(stream, |
81 |
- buf, bytes_read) == HUBBUB_OK); |
82 |
+ buf, bytes_read) == (parserutils_error)HUBBUB_OK); |
83 |
|
84 |
|
85 |
len -= bytes_read; |
86 |
|
87 |
--- hubbub-0.1.2/test/tokeniser2.c |
88 |
+++ hubbub-0.1.2/test/tokeniser2.c |
89 |
@@ -83,11 +83,9 @@ |
90 |
printf("Test: %s\n", |
91 |
json_object_get_string(val)); |
92 |
} else if (strcmp(key, "input") == 0) { |
93 |
- int len; |
94 |
ctx.input = (const uint8_t *) |
95 |
- json_object_get_string_len(val, |
96 |
- &len); |
97 |
- ctx.input_len = len; |
98 |
+ json_object_get_string(val); |
99 |
+ ctx.input_len = json_object_get_string_len(val); |
100 |
} else if (strcmp(key, "output") == 0) { |
101 |
ctx.output = json_object_get_array(val); |
102 |
ctx.output_index = 0; |
103 |
@@ -151,7 +149,7 @@ |
104 |
ctx->last_start_tag); |
105 |
|
106 |
assert(parserutils_inputstream_append(stream, |
107 |
- buf, len - 1) == HUBBUB_OK); |
108 |
+ buf, len - 1) == (parserutils_error)HUBBUB_OK); |
109 |
|
110 |
assert(hubbub_tokeniser_run(tok) == HUBBUB_OK); |
111 |
} |
112 |
@@ -173,7 +171,7 @@ |
113 |
params.content_model.model = |
114 |
HUBBUB_CONTENT_MODEL_PCDATA; |
115 |
} else { |
116 |
- char *cm = json_object_get_string( |
117 |
+ const char *cm = json_object_get_string( |
118 |
(struct json_object *) |
119 |
array_list_get_idx(ctx->content_model, i)); |
120 |
|
121 |
@@ -196,10 +194,10 @@ |
122 |
¶ms) == HUBBUB_OK); |
123 |
|
124 |
assert(parserutils_inputstream_append(stream, |
125 |
- ctx->input, ctx->input_len) == HUBBUB_OK); |
126 |
+ ctx->input, ctx->input_len) == (parserutils_error)HUBBUB_OK); |
127 |
|
128 |
assert(parserutils_inputstream_append(stream, NULL, 0) == |
129 |
- HUBBUB_OK); |
130 |
+ (parserutils_error)HUBBUB_OK); |
131 |
|
132 |
printf("Input: '%.*s' (%d)\n", (int) ctx->input_len, |
133 |
(const char *) ctx->input, |
134 |
@@ -271,11 +269,11 @@ |
135 |
switch (token->type) { |
136 |
case HUBBUB_TOKEN_DOCTYPE: |
137 |
{ |
138 |
- char *expname = json_object_get_string( |
139 |
+ const char *expname = json_object_get_string( |
140 |
array_list_get_idx(items, 1)); |
141 |
- char *exppub = json_object_get_string( |
142 |
+ const char *exppub = json_object_get_string( |
143 |
array_list_get_idx(items, 2)); |
144 |
- char *expsys = json_object_get_string( |
145 |
+ const char *expsys = json_object_get_string( |
146 |
array_list_get_idx(items, 3)); |
147 |
bool expquirks = !json_object_get_boolean( |
148 |
array_list_get_idx(items, 4)); |
149 |
@@ -332,7 +330,7 @@ |
150 |
break; |
151 |
case HUBBUB_TOKEN_START_TAG: |
152 |
{ |
153 |
- char *expname = json_object_get_string( |
154 |
+ const char *expname = json_object_get_string( |
155 |
array_list_get_idx(items, 1)); |
156 |
struct lh_entry *expattrs = json_object_get_object( |
157 |
array_list_get_idx(items, 2))->head; |
158 |
@@ -366,7 +364,7 @@ |
159 |
|
160 |
for (i = 0; i < token->data.tag.n_attributes; i++) { |
161 |
char *expname = (char *) expattrs->k; |
162 |
- char *expval = json_object_get_string( |
163 |
+ const char *expval = json_object_get_string( |
164 |
(struct json_object *) expattrs->v); |
165 |
const char *gotname = (const char *) |
166 |
token->data.tag.attributes[i].name.ptr; |
167 |
@@ -395,7 +393,7 @@ |
168 |
break; |
169 |
case HUBBUB_TOKEN_END_TAG: |
170 |
{ |
171 |
- char *expname = json_object_get_string( |
172 |
+ const char *expname = json_object_get_string( |
173 |
array_list_get_idx(items, 1)); |
174 |
const char *tagname = (const char *) |
175 |
token->data.tag.name.ptr; |
176 |
@@ -412,7 +410,7 @@ |
177 |
break; |
178 |
case HUBBUB_TOKEN_COMMENT: |
179 |
{ |
180 |
- char *expstr = json_object_get_string( |
181 |
+ const char *expstr = json_object_get_string( |
182 |
array_list_get_idx(items, 1)); |
183 |
const char *gotstr = (const char *) |
184 |
token->data.comment.ptr; |
185 |
@@ -427,9 +425,10 @@ |
186 |
break; |
187 |
case HUBBUB_TOKEN_CHARACTER: |
188 |
{ |
189 |
- int expstrlen; |
190 |
- char *expstr = json_object_get_string_len( |
191 |
- array_list_get_idx(items, 1), &expstrlen); |
192 |
+ int expstrlen = json_object_get_string_len( |
193 |
+ array_list_get_idx(items, 1)); |
194 |
+ const char *expstr = json_object_get_string( |
195 |
+ array_list_get_idx(items, 1)); |
196 |
const char *gotstr = (const char *) |
197 |
token->data.character.ptr; |
198 |
size_t len = min(token->data.character.len, |
199 |
--- hubbub-0.1.2/test/tokeniser3.c |
200 |
+++ hubbub-0.1.2/test/tokeniser3.c |
201 |
@@ -81,11 +81,9 @@ |
202 |
printf("Test: %s\n", |
203 |
json_object_get_string(val)); |
204 |
} else if (strcmp(key, "input") == 0) { |
205 |
- int len; |
206 |
ctx.input = (const uint8_t *) |
207 |
- json_object_get_string_len(val, |
208 |
- &len); |
209 |
- ctx.input_len = len; |
210 |
+ json_object_get_string(val); |
211 |
+ ctx.input_len = json_object_get_string_len(val); |
212 |
} else if (strcmp(key, "output") == 0) { |
213 |
ctx.output = json_object_get_array(val); |
214 |
ctx.output_index = 0; |
215 |
@@ -148,7 +146,7 @@ |
216 |
ctx->last_start_tag); |
217 |
|
218 |
assert(parserutils_inputstream_append(stream, |
219 |
- buf, len - 1) == HUBBUB_OK); |
220 |
+ buf, len - 1) == (parserutils_error)HUBBUB_OK); |
221 |
|
222 |
assert(hubbub_tokeniser_run(tok) == HUBBUB_OK); |
223 |
} |
224 |
@@ -170,7 +168,7 @@ |
225 |
params.content_model.model = |
226 |
HUBBUB_CONTENT_MODEL_PCDATA; |
227 |
} else { |
228 |
- char *cm = json_object_get_string( |
229 |
+ const char *cm = json_object_get_string( |
230 |
(struct json_object *) |
231 |
array_list_get_idx(ctx->content_model, i)); |
232 |
|
233 |
@@ -197,13 +197,13 @@ |
234 |
for (j = 0; j < ctx->input_len; j++) { |
235 |
assert(parserutils_inputstream_append(stream, |
236 |
ctx->input + j, 1) == |
237 |
- HUBBUB_OK); |
238 |
+ (parserutils_error)HUBBUB_OK); |
239 |
|
240 |
assert(hubbub_tokeniser_run(tok) == HUBBUB_OK); |
241 |
} |
242 |
|
243 |
assert(parserutils_inputstream_append(stream, NULL, 0) == |
244 |
- HUBBUB_OK); |
245 |
+ (parserutils_error)HUBBUB_OK); |
246 |
|
247 |
assert(hubbub_tokeniser_run(tok) == HUBBUB_OK); |
248 |
|
249 |
@@ -273,11 +271,11 @@ |
250 |
switch (token->type) { |
251 |
case HUBBUB_TOKEN_DOCTYPE: |
252 |
{ |
253 |
- char *expname = json_object_get_string( |
254 |
+ const char *expname = json_object_get_string( |
255 |
array_list_get_idx(items, 1)); |
256 |
- char *exppub = json_object_get_string( |
257 |
+ const char *exppub = json_object_get_string( |
258 |
array_list_get_idx(items, 2)); |
259 |
- char *expsys = json_object_get_string( |
260 |
+ const char *expsys = json_object_get_string( |
261 |
array_list_get_idx(items, 3)); |
262 |
bool expquirks = !json_object_get_boolean( |
263 |
array_list_get_idx(items, 4)); |
264 |
@@ -337,7 +335,7 @@ |
265 |
break; |
266 |
case HUBBUB_TOKEN_START_TAG: |
267 |
{ |
268 |
- char *expname = json_object_get_string( |
269 |
+ const char *expname = json_object_get_string( |
270 |
array_list_get_idx(items, 1)); |
271 |
struct lh_entry *expattrs = json_object_get_object( |
272 |
array_list_get_idx(items, 2))->head; |
273 |
@@ -371,7 +369,7 @@ |
274 |
|
275 |
for (i = 0; i < token->data.tag.n_attributes; i++) { |
276 |
char *expname = (char *) expattrs->k; |
277 |
- char *expval = json_object_get_string( |
278 |
+ const char *expval = json_object_get_string( |
279 |
(struct json_object *) expattrs->v); |
280 |
const char *gotname = (const char *) |
281 |
token->data.tag.attributes[i].name.ptr; |
282 |
@@ -400,7 +398,7 @@ |
283 |
break; |
284 |
case HUBBUB_TOKEN_END_TAG: |
285 |
{ |
286 |
- char *expname = json_object_get_string( |
287 |
+ const char *expname = json_object_get_string( |
288 |
array_list_get_idx(items, 1)); |
289 |
const char *tagname = (const char *) |
290 |
token->data.tag.name.ptr; |
291 |
@@ -417,7 +415,7 @@ |
292 |
break; |
293 |
case HUBBUB_TOKEN_COMMENT: |
294 |
{ |
295 |
- char *expstr = json_object_get_string( |
296 |
+ const char *expstr = json_object_get_string( |
297 |
array_list_get_idx(items, 1)); |
298 |
const char *gotstr = (const char *) |
299 |
token->data.comment.ptr; |
300 |
@@ -432,9 +430,10 @@ |
301 |
break; |
302 |
case HUBBUB_TOKEN_CHARACTER: |
303 |
{ |
304 |
- int expstrlen; |
305 |
- char *expstr = json_object_get_string_len( |
306 |
- array_list_get_idx(items, 1), &expstrlen); |
307 |
+ int expstrlen = json_object_get_string_len( |
308 |
+ array_list_get_idx(items, 1)); |
309 |
+ const char *expstr = json_object_get_string( |
310 |
+ array_list_get_idx(items, 1)); |
311 |
const char *gotstr = (const char *) |
312 |
token->data.character.ptr; |
313 |
size_t len = min(token->data.character.len, |
314 |
--- hubbub-0.1.2/test/tree.c |
315 |
+++ hubbub-0.1.2/test/tree.c |
316 |
@@ -88,7 +88,7 @@ |
317 |
hubbub_parser *parser; |
318 |
hubbub_parser_optparams params; |
319 |
FILE *fp; |
320 |
- size_t len, origlen; |
321 |
+ size_t len; |
322 |
uint8_t *buf = alloca(CHUNK_SIZE); |
323 |
const char *charset; |
324 |
hubbub_charset_source cssource; |
325 |
@@ -123,7 +123,7 @@ |
326 |
} |
327 |
|
328 |
fseek(fp, 0, SEEK_END); |
329 |
- origlen = len = ftell(fp); |
330 |
+ len = ftell(fp); |
331 |
fseek(fp, 0, SEEK_SET); |
332 |
|
333 |
while (len > 0) { |