2 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
3 * Copyright (c) 1999-2000 by Hewlett-Packard Company. All rights reserved.
5 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
6 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
8 * Permission is hereby granted to use or copy this program
9 * for any purpose, provided the above notices are retained on all copies.
10 * Permission to modify the code and to distribute modified code is granted,
11 * provided the above notices are retained, and a notice that the code was
12 * modified is included with the above copyright notice.
18 * Some simple primitives for allocation with explicit type information.
19 * Simple objects are allocated such that they contain a GC_descr at the
20 * end (in the last allocated word). This descriptor may be a procedure
21 * which then examines an extended descriptor passed as its environment.
23 * Arrays are treated as simple objects if they have sufficiently simple
24 * structure. Otherwise they are allocated from an array kind that supplies
25 * a special mark procedure. These arrays contain a pointer to a
26 * complex_descriptor as their last word.
27 * This is done because the environment field is too small, and the collector
28 * must trace the complex_descriptor.
30 * Note that descriptors inside objects may appear cleared, if we encounter a
31 * false reference to an object on a free list. In the GC_descr case, this
32 * is OK, since a 0 descriptor corresponds to examining no fields.
33 * In the complex_descriptor case, we explicitly check for that case.
35 * MAJOR PARTS OF THIS CODE HAVE NOT BEEN TESTED AT ALL and are not testable,
36 * since they are not accessible through the current interface.
41 #include "private/gc_pmark.h"
/* NOTE(review): this file appears to be a partially extracted copy of  */
/* the collector's typed-allocation module.  Stray leading numbers and  */
/* dropped interior lines are extraction artifacts; this pass only adds */
/* comments and leaves the visible tokens untouched.                    */
/* Extra space reserved at the end of each typed object so its GC_descr */
/* can be stored in the last word (see the allocators below).           */
44 # define TYPD_EXTRA_BYTES (sizeof(word) - EXTRA_BYTES)
/* Set once by GC_init_explicit_typing(); checked there to avoid        */
/* repeating the one-time kind/mark-procedure registration.             */
46 GC_bool GC_explicit_typing_initialized = FALSE;
48 int GC_explicit_kind; /* Object kind for objects with indirect */
49 /* (possibly extended) descriptors. */
51 int GC_array_kind; /* Object kind for objects with complex */
52 /* descriptors and GC_array_mark_proc. */
54 /* Extended descriptors. GC_typed_mark_proc understands these. */
55 /* These are used for simple objects that are larger than what */
56 /* can be described by a BITMAP_BITS sized bitmap. */
/* NOTE(review): the enclosing "typedef struct {...} ext_descr;" lines  */
/* seem to have been lost in extraction; the two fields below are its   */
/* members (used as GC_ext_descriptors[i].ed_bitmap/.ed_continued).     */
58 word ed_bitmap; /* lsb corresponds to first word. */
59 GC_bool ed_continued; /* next entry is continuation. */
62 /* Array descriptors. GC_array_mark_proc understands these. */
63 /* We may eventually need to add provisions for headers and */
64 /* trailers. Hence we provide for tree structured descriptors, */
65 /* though we don't really use them currently. */
/* Tagged union of three node kinds: leaf (homogeneous simple array),   */
/* complex array (elements need their own complex descriptor), and      */
/* sequence (concatenation of two descriptors).                         */
/* NOTE(review): the *_tag fields, the LEAF_TAG/ARRAY_TAG defines, the  */
/* closing braces, and the union member names (ld/ad/sd, judging from   */
/* the accessors used later, e.g. d -> ld.ld_size) appear to have been  */
/* dropped in extraction.                                               */
66 typedef union ComplexDescriptor {
67 struct LeafDescriptor { /* Describes simple array */
70 word ld_size; /* bytes per element */
71 /* multiple of ALIGNMENT */
72 word ld_nelements; /* Number of elements. */
73 GC_descr ld_descriptor; /* A simple length, bitmap, */
74 /* or procedure descriptor. */
76 struct ComplexArrayDescriptor {
80 union ComplexDescriptor * ad_element_descr;
82 struct SequenceDescriptor {
84 # define SEQUENCE_TAG 3
85 union ComplexDescriptor * sd_first;
86 union ComplexDescriptor * sd_second;
91 ext_descr * GC_ext_descriptors; /* Points to array of extended */
94 word GC_ed_size = 0; /* Current size of above arrays. */
95 # define ED_INITIAL_SIZE 100;
97 word GC_avail_descr = 0; /* Next available slot. */
/* Indices returned by GC_new_proc_inner() for our two mark procedures; */
/* used with GC_MAKE_PROC to build procedure descriptors.               */
99 int GC_typed_mark_proc_index; /* Indices of my mark */
100 int GC_array_mark_proc_index; /* procedures. */
102 /* Add a multiword bitmap to GC_ext_descriptors arrays. Return */
103 /* starting index. */
104 /* Returns -1 on failure. */
105 /* Caller does not hold allocation lock. */
/* Copies nbits of bm into the table, one word per ext_descr entry,     */
/* marking every entry but the last as "continued".                     */
/* NOTE(review): interior lines appear to have been dropped in          */
/* extraction -- e.g. the K&R parameter declarations, the LOCK/UNLOCK   */
/* pair around the resize loop, the load of bm[i] into last_part, and   */
/* several closing braces.  Comments below describe the visible code.   */
106 signed_word GC_add_ext_descriptor(bm, nbits)
110 register size_t nwords = divWORDSZ(nbits + WORDSZ-1);
111 register signed_word result;
113 register word last_part;
114 register int extra_bits;
/* Grow the table until the new entries fit.  The lock is presumably    */
/* dropped around the allocation, hence the re-check below -- TODO      */
/* confirm against the unextracted original.                            */
119 while (GC_avail_descr + nwords >= GC_ed_size) {
122 word ed_size = GC_ed_size;
127 new_size = ED_INITIAL_SIZE;
129 new_size = 2 * ed_size;
/* Index must fit in a mark-procedure environment value. */
130 if (new_size > MAX_ENV) return(-1);
132 new = (ext_descr *) GC_malloc_atomic(new_size * sizeof(ext_descr));
133 if (new == 0) return(-1);
/* Only install the new array if no other thread resized meanwhile. */
136 if (ed_size == GC_ed_size) {
137 if (GC_avail_descr != 0) {
138 BCOPY(GC_ext_descriptors, new,
139 GC_avail_descr * sizeof(ext_descr));
141 GC_ed_size = new_size;
142 GC_ext_descriptors = new;
143 } /* else another thread already resized it in the meantime */
145 result = GC_avail_descr;
/* All full words of the bitmap are continuation entries. */
146 for (i = 0; i < nwords-1; i++) {
147 GC_ext_descriptors[result + i].ed_bitmap = bm[i];
148 GC_ext_descriptors[result + i].ed_continued = TRUE;
/* Final (partial) word: shift out the bits beyond nbits. */
151 /* Clear irrelevant bits. */
152 extra_bits = nwords * WORDSZ - nbits;
153 last_part <<= extra_bits;
154 last_part >>= extra_bits;
155 GC_ext_descriptors[result + i].ed_bitmap = last_part;
156 GC_ext_descriptors[result + i].ed_continued = FALSE;
157 GC_avail_descr += nwords;
163 /* Table of bitmap descriptors for n word long all pointer objects. */
/* Filled in by GC_init_explicit_typing(); entry i describes an object  */
/* whose first i words are all pointers.                                */
164 GC_descr GC_bm_table[WORDSZ/2];
166 /* Return a descriptor for the concatenation of 2 nwords long objects, */
167 /* each of which is described by descriptor. */
168 /* The result is known to be short enough to fit into a bitmap */
170 /* Descriptor is a GC_DS_LENGTH or GC_DS_BITMAP descriptor. */
/* NOTE(review): the opening/closing braces and the final               */
/* return(descriptor) appear to have been dropped in extraction.        */
171 GC_descr GC_double_descr(descriptor, nwords)
172 register GC_descr descriptor;
173 register word nwords;
/* A length descriptor is first converted to its bitmap equivalent. */
175 if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
176 descriptor = GC_bm_table[BYTES_TO_WORDS((word)descriptor)];
/* OR in a copy of the bitmap shifted down by one object's worth. */
178 descriptor |= (descriptor & ~GC_DS_TAGS) >> nwords;
182 complex_descriptor * GC_make_sequence_descriptor();
184 /* Build a descriptor for an array with nelements elements, */
185 /* each of which can be described by a simple descriptor. */
186 /* We try to optimize some common cases. */
187 /* If the result is COMPLEX, then a complex_descr* is returned */
189 /* If the result is LEAF, then we built a LeafDescriptor in */
190 /* the structure pointed to by leaf. */
191 /* The tag in the leaf structure is not set. */
192 /* If the result is SIMPLE, then a GC_descr */
193 /* is returned in *simple_d. */
194 /* If the result is NO_MEM, then */
195 /* we failed to allocate the descriptor. */
196 /* The implementation knows that GC_DS_LENGTH is 0. */
197 /* *leaf, *complex_d, and *simple_d may be used as temporaries */
198 /* during the construction. */
/* NOTE(review): many interior lines (remaining K&R parameter           */
/* declarations, several return statements, else-branches and closing   */
/* braces) appear to have been dropped in extraction; the comments      */
/* below annotate only what is visible.                                 */
203 int GC_make_array_descriptor(nelements, size, descriptor,
204 simple_d, complex_d, leaf)
209 complex_descriptor **complex_d;
210 struct LeafDescriptor * leaf;
212 # define OPT_THRESHOLD 50
213 /* For larger arrays, we try to combine descriptors of adjacent */
214 /* descriptors to speed up marking, and to reduce the amount */
215 /* of space needed on the mark stack. */
/* Case 1: element is "first descriptor bytes are all pointers". */
216 if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
/* Fully-pointer elements concatenate into one长 length descriptor. */
217 if ((word)descriptor == size) {
218 *simple_d = nelements * descriptor;
220 } else if ((word)descriptor == 0) {
221 *simple_d = (GC_descr)0;
/* Case 2: small arrays -- try doubling the element descriptor. */
225 if (nelements <= OPT_THRESHOLD) {
226 if (nelements <= 1) {
227 if (nelements == 1) {
228 *simple_d = descriptor;
231 *simple_d = (GC_descr)0;
/* Pair up adjacent elements and recurse on half as many,           */
/* twice-as-big elements.                                           */
235 } else if (size <= BITMAP_BITS/2
236 && (descriptor & GC_DS_TAGS) != GC_DS_PROC
237 && (size & (sizeof(word)-1)) == 0) {
239 GC_make_array_descriptor(nelements/2, 2*size,
240 GC_double_descr(descriptor,
241 BYTES_TO_WORDS(size)),
242 simple_d, complex_d, leaf);
/* Odd element count: append a one-element leaf for the tail. */
243 if ((nelements & 1) == 0) {
246 struct LeafDescriptor * one_element =
247 (struct LeafDescriptor *)
248 GC_malloc_atomic(sizeof(struct LeafDescriptor));
250 if (result == NO_MEM || one_element == 0) return(NO_MEM);
251 one_element -> ld_tag = LEAF_TAG;
252 one_element -> ld_size = size;
253 one_element -> ld_nelements = 1;
254 one_element -> ld_descriptor = descriptor;
/* Recursion yielded SIMPLE: wrap it in a leaf so it can be     */
/* sequenced with the tail element.                             */
258 struct LeafDescriptor * beginning =
259 (struct LeafDescriptor *)
260 GC_malloc_atomic(sizeof(struct LeafDescriptor));
261 if (beginning == 0) return(NO_MEM);
262 beginning -> ld_tag = LEAF_TAG;
263 beginning -> ld_size = size;
264 beginning -> ld_nelements = 1;
265 beginning -> ld_descriptor = *simple_d;
266 *complex_d = GC_make_sequence_descriptor(
267 (complex_descriptor *)beginning,
268 (complex_descriptor *)one_element);
/* Recursion yielded LEAF: copy the temporary leaf to the heap  */
/* before sequencing it with the tail element.                  */
273 struct LeafDescriptor * beginning =
274 (struct LeafDescriptor *)
275 GC_malloc_atomic(sizeof(struct LeafDescriptor));
276 if (beginning == 0) return(NO_MEM);
277 beginning -> ld_tag = LEAF_TAG;
278 beginning -> ld_size = leaf -> ld_size;
279 beginning -> ld_nelements = leaf -> ld_nelements;
280 beginning -> ld_descriptor = leaf -> ld_descriptor;
281 *complex_d = GC_make_sequence_descriptor(
282 (complex_descriptor *)beginning,
283 (complex_descriptor *)one_element);
287 *complex_d = GC_make_sequence_descriptor(
289 (complex_descriptor *)one_element);
/* Fallback: fill in the caller-provided leaf (tag left unset). */
296 leaf -> ld_size = size;
297 leaf -> ld_nelements = nelements;
298 leaf -> ld_descriptor = descriptor;
303 complex_descriptor * GC_make_sequence_descriptor(first, second)
304 complex_descriptor * first;
305 complex_descriptor * second;
307 struct SequenceDescriptor * result =
308 (struct SequenceDescriptor *)
309 GC_malloc(sizeof(struct SequenceDescriptor));
310 /* Can't result in overly conservative marking, since tags are */
311 /* very small integers. Probably faster than maintaining type */
314 result -> sd_tag = SEQUENCE_TAG;
315 result -> sd_first = first;
316 result -> sd_second = second;
318 return((complex_descriptor *)result);
322 complex_descriptor * GC_make_complex_array_descriptor(nelements, descr)
324 complex_descriptor * descr;
326 struct ComplexArrayDescriptor * result =
327 (struct ComplexArrayDescriptor *)
328 GC_malloc(sizeof(struct ComplexArrayDescriptor));
331 result -> ad_tag = ARRAY_TAG;
332 result -> ad_nelements = nelements;
333 result -> ad_element_descr = descr;
335 return((complex_descriptor *)result);
/* Free lists for the two typed-object kinds created in                 */
/* GC_init_explicit_typing() below.                                     */
339 ptr_t * GC_eobjfreelist;
341 ptr_t * GC_arobjfreelist;
/* Forward declarations of the mark procedures (final "env" parameter   */
/* lines appear to have been dropped in extraction).                    */
343 mse * GC_typed_mark_proc GC_PROTO((register word * addr,
344 register mse * mark_stack_ptr,
345 mse * mark_stack_limit,
348 mse * GC_array_mark_proc GC_PROTO((register word * addr,
349 register mse * mark_stack_ptr,
350 mse * mark_stack_limit,
353 /* Caller does not hold allocation lock. */
/* One-time setup: registers the two object kinds and mark procedures,  */
/* and precomputes GC_bm_table.  NOTE(review): interior lines (locking, */
/* braces, the GC_bm_table store at the loop bottom) appear to have     */
/* been dropped in extraction.                                          */
354 void GC_init_explicit_typing()
/* LeafDescriptor is stored inline at the end of objects, so it must    */
/* be word-aligned in size.                                             */
361 if (sizeof(struct LeafDescriptor) % sizeof(word) != 0)
362 ABORT("Bad leaf descriptor size");
/* Idempotent: bail out if another caller already initialized us. */
366 if (GC_explicit_typing_initialized) {
371 GC_explicit_typing_initialized = TRUE;
372 /* Set up object kind with simple indirect descriptor. */
373 GC_eobjfreelist = (ptr_t *)GC_new_free_list_inner();
374 GC_explicit_kind = GC_new_kind_inner(
375 (void **)GC_eobjfreelist,
376 (((word)WORDS_TO_BYTES(-1)) | GC_DS_PER_OBJECT),
378 /* Descriptors are in the last word of the object. */
379 GC_typed_mark_proc_index = GC_new_proc_inner(GC_typed_mark_proc);
380 /* Set up object kind with array descriptor. */
381 GC_arobjfreelist = (ptr_t *)GC_new_free_list_inner();
382 GC_array_mark_proc_index = GC_new_proc_inner(GC_array_mark_proc);
383 GC_array_kind = GC_new_kind_inner(
384 (void **)GC_arobjfreelist,
385 GC_MAKE_PROC(GC_array_mark_proc_index, 0),
/* Precompute "first i words are pointers" bitmap descriptors. */
387 for (i = 0; i < WORDSZ/2; i++) {
388 GC_descr d = (((word)(-1)) >> (WORDSZ - i)) << (WORDSZ - i);
/* Mark procedure for GC_explicit_kind objects: walks one word of       */
/* bitmap (GC_ext_descriptors[env]) over the object at addr, pushing    */
/* plausible pointers.  If the descriptor continues, re-pushes itself   */
/* with env+1 so no single invocation does unbounded work.              */
/* NOTE(review): #else/#endif lines and some braces appear to have been */
/* dropped in extraction.                                               */
396 # if defined(__STDC__) || defined(__cplusplus)
397 mse * GC_typed_mark_proc(register word * addr,
398 register mse * mark_stack_ptr,
399 mse * mark_stack_limit,
402 mse * GC_typed_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
403 register word * addr;
404 register mse * mark_stack_ptr;
405 mse * mark_stack_limit;
409 register word bm = GC_ext_descriptors[env].ed_bitmap;
410 register word * current_p = addr;
411 register word current;
412 register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
413 register ptr_t least_ha = GC_least_plausible_heap_addr;
/* Each set bit in bm marks a word of the object holding a pointer. */
415 for (; bm != 0; bm >>= 1, current_p++) {
417 current = *current_p;
418 FIXUP_POINTER(current);
/* Cheap plausibility filter before the (more expensive) push. */
419 if ((ptr_t)current >= least_ha && (ptr_t)current <= greatest_ha) {
420 PUSH_CONTENTS((ptr_t)current, mark_stack_ptr,
421 mark_stack_limit, current_p, exit1);
425 if (GC_ext_descriptors[env].ed_continued) {
426 /* Push an entry with the rest of the descriptor back onto the */
427 /* stack. Thus we never do too much work at once. Note that */
428 /* we also can't overflow the mark stack unless we actually */
429 /* mark something. */
431 if (mark_stack_ptr >= mark_stack_limit) {
432 mark_stack_ptr = GC_signal_mark_stack_overflow(mark_stack_ptr);
434 mark_stack_ptr -> mse_start = addr + WORDSZ;
435 mark_stack_ptr -> mse_descr =
436 GC_MAKE_PROC(GC_typed_mark_proc_index, env+1);
438 return(mark_stack_ptr);
441 /* Return the size of the object described by d. It would be faster to */
442 /* store this directly, or to compute it as part of */
443 /* GC_push_complex_descriptor, but hopefully it doesn't matter. */
/* NOTE(review): the switch statement over the descriptor tag (and its  */
/* case labels) appears to have been dropped in extraction; the three   */
/* visible returns presumably correspond to LEAF/ARRAY/SEQUENCE cases.  */
444 word GC_descr_obj_size(d)
445 register complex_descriptor *d;
/* Leaf: element count times element size. */
449 return(d -> ld.ld_nelements * d -> ld.ld_size);
/* Complex array: recurse on the element descriptor. */
451 return(d -> ad.ad_nelements
452 * GC_descr_obj_size(d -> ad.ad_element_descr));
/* Sequence: sum of both halves. */
454 return(GC_descr_obj_size(d -> sd.sd_first)
455 + GC_descr_obj_size(d -> sd.sd_second));
457 ABORT("Bad complex descriptor");
458 /*NOTREACHED*/ return 0; /*NOTREACHED*/
462 /* Push descriptors for the object at addr with complex descriptor d */
463 /* onto the mark stack. Return 0 if the mark stack overflowed. */
/* NOTE(review): the switch over the descriptor tag, some parameter     */
/* declarations (msp/msl), and several braces/current-advance lines     */
/* appear to have been dropped in extraction.                           */
464 mse * GC_push_complex_descriptor(addr, d, msp, msl)
466 register complex_descriptor *d;
470 register ptr_t current = (ptr_t) addr;
471 register word nelements;
/* Leaf case: push one mark-stack entry per element. */
478 register GC_descr descr = d -> ld.ld_descriptor;
480 nelements = d -> ld.ld_nelements;
/* Ensure all nelements entries fit before pushing any. */
481 if (msl - msp <= (ptrdiff_t)nelements) return(0);
482 sz = d -> ld.ld_size;
483 for (i = 0; i < nelements; i++) {
485 msp -> mse_start = (word *)current;
486 msp -> mse_descr = descr;
/* Complex-array case: recurse once per element. */
493 register complex_descriptor *descr = d -> ad.ad_element_descr;
495 nelements = d -> ad.ad_nelements;
496 sz = GC_descr_obj_size(descr);
497 for (i = 0; i < nelements; i++) {
498 msp = GC_push_complex_descriptor((word *)current, descr,
/* Propagate mark-stack overflow to the caller. */
500 if (msp == 0) return(0);
/* Sequence case: first half, then second half at current + sz. */
507 sz = GC_descr_obj_size(d -> sd.sd_first);
508 msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_first,
510 if (msp == 0) return(0);
512 msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_second,
517 ABORT("Bad complex descriptor");
518 /*NOTREACHED*/ return 0; /*NOTREACHED*/
/* Mark procedure for GC_array_kind objects: reads the complex          */
/* descriptor stored in the object's last word and replaces the array   */
/* entry with per-subobject entries.  NOTE(review): the #else/#endif    */
/* lines, the null-descriptor check guarding the early return, and the  */
/* extra arguments to GC_push_complex_descriptor appear to have been    */
/* dropped in extraction.                                               */
523 # if defined(__STDC__) || defined(__cplusplus)
524 mse * GC_array_mark_proc(register word * addr,
525 register mse * mark_stack_ptr,
526 mse * mark_stack_limit,
529 mse * GC_array_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
530 register word * addr;
531 register mse * mark_stack_ptr;
532 mse * mark_stack_limit;
536 register hdr * hhdr = HDR(addr);
537 register word sz = hhdr -> hb_sz;
/* The complex descriptor lives in the object's last word. */
538 register complex_descriptor * descr = (complex_descriptor *)(addr[sz-1]);
539 mse * orig_mark_stack_ptr = mark_stack_ptr;
540 mse * new_mark_stack_ptr;
/* A cleared descriptor means this is a free-list entry (see file       */
/* header comment); there is nothing to mark.                           */
543 /* Found a reference to a free list entry. Ignore it. */
544 return(orig_mark_stack_ptr);
546 /* In use counts were already updated when array descriptor was */
547 /* pushed. Here we only replace it by subobject descriptors, so */
548 /* no update is necessary. */
549 new_mark_stack_ptr = GC_push_complex_descriptor(addr, descr,
552 if (new_mark_stack_ptr == 0) {
553 /* Doesn't fit. Conservatively push the whole array as a unit */
554 /* and request a mark stack expansion. */
555 /* This cannot cause a mark stack overflow, since it replaces */
556 /* the original array entry. */
557 GC_mark_stack_too_small = TRUE;
558 new_mark_stack_ptr = orig_mark_stack_ptr + 1;
559 new_mark_stack_ptr -> mse_start = addr;
560 new_mark_stack_ptr -> mse_descr = WORDS_TO_BYTES(sz) | GC_DS_LENGTH;
/* Also push the descriptor word itself so it stays reachable. */
562 /* Push descriptor itself */
563 new_mark_stack_ptr++;
564 new_mark_stack_ptr -> mse_start = addr + sz - 1;
565 new_mark_stack_ptr -> mse_descr = sizeof(word) | GC_DS_LENGTH;
567 return(new_mark_stack_ptr);
/* Public API: build a GC_descr from a caller-supplied bitmap of len    */
/* bits (bit i set => word i may hold a pointer).  Chooses, in order:   */
/* length descriptor, inline bitmap descriptor, or an extended          */
/* descriptor via GC_add_ext_descriptor, falling back to a              */
/* conservative length descriptor if that allocation fails.             */
/* NOTE(review): #else/#endif, K&R parameter declarations, the index    */
/* variable declaration, and loop-body braces appear to have been       */
/* dropped in extraction.                                               */
570 #if defined(__STDC__) || defined(__cplusplus)
571 GC_descr GC_make_descriptor(GC_bitmap bm, size_t len)
573 GC_descr GC_make_descriptor(bm, len)
578 register signed_word last_set_bit = len - 1;
579 register word result;
581 # define HIGH_BIT (((word)1) << (WORDSZ - 1))
583 if (!GC_explicit_typing_initialized) GC_init_explicit_typing();
/* Trim trailing pointer-free words; they need no descriptor bits. */
584 while (last_set_bit >= 0 && !GC_get_bit(bm, last_set_bit)) last_set_bit --;
585 if (last_set_bit < 0) return(0 /* no pointers */);
586 # if ALIGNMENT == CPP_WORDSZ/8
588 register GC_bool all_bits_set = TRUE;
589 for (i = 0; i < last_set_bit; i++) {
590 if (!GC_get_bit(bm, i)) {
591 all_bits_set = FALSE;
596 /* An initial section contains all pointers. Use length descriptor. */
597 return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
601 if (last_set_bit < BITMAP_BITS) {
602 /* Hopefully the common case. */
603 /* Build bitmap descriptor (with bits reversed) */
605 for (i = last_set_bit - 1; i >= 0; i--) {
607 if (GC_get_bit(bm, i)) result |= HIGH_BIT;
609 result |= GC_DS_BITMAP;
/* Too long for an inline bitmap: store it in the extended table. */
614 index = GC_add_ext_descriptor(bm, (word)last_set_bit+1);
615 if (index == -1) return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
616 /* Out of memory: use conservative */
618 result = GC_MAKE_PROC(GC_typed_mark_proc_index, (word)index);
623 ptr_t GC_clear_stack();
/* Slow-path allocation wrappers: allocate via the generic allocator    */
/* and clear dead stack area (GC_clear_stack returns its argument).     */
625 #define GENERAL_MALLOC(lb,k) \
626 (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))
628 #define GENERAL_MALLOC_IOP(lb,k) \
629 (GC_PTR)GC_clear_stack(GC_generic_malloc_ignore_off_page(lb, k))
/* Public API: allocate lb bytes of kind GC_explicit_kind and store the */
/* descriptor d in the object's last word.  NOTE(review): #else/#endif, */
/* the FASTLOCK call, the small-object fast-path store/unlock lines,    */
/* and closing braces appear to have been dropped in extraction.        */
631 #if defined(__STDC__) || defined(__cplusplus)
632 void * GC_malloc_explicitly_typed(size_t lb, GC_descr d)
634 char * GC_malloc_explicitly_typed(lb, d)
640 register ptr_t * opp;
/* Reserve room for the trailing descriptor word. */
644 lb += TYPD_EXTRA_BYTES;
645 if( SMALL_OBJ(lb) ) {
647 lw = GC_size_map[lb];
649 lw = ALIGNED_WORDS(lb);
651 opp = &(GC_eobjfreelist[lw]);
/* Fast path failed: fall back to the general allocator. */
653 if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
655 op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
656 if (0 == op) return 0;
658 lw = GC_size_map[lb]; /* May have been uninitialized. */
663 GC_words_allocd += lw;
/* Large-object path. */
667 op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
669 lw = BYTES_TO_WORDS(GC_size(op));
/* Descriptor goes in the last word, as GC_typed_mark_proc expects. */
672 ((word *)op)[lw - 1] = d;
/* Same as GC_malloc_explicitly_typed, but interior pointers to the     */
/* object need not be recognized (uses the ignore-off-page allocator).  */
/* NOTE(review): lines dropped in extraction as in the sibling above.   */
676 #if defined(__STDC__) || defined(__cplusplus)
677 void * GC_malloc_explicitly_typed_ignore_off_page(size_t lb, GC_descr d)
679 char * GC_malloc_explicitly_typed_ignore_off_page(lb, d)
685 register ptr_t * opp;
/* Reserve room for the trailing descriptor word. */
689 lb += TYPD_EXTRA_BYTES;
690 if( SMALL_OBJ(lb) ) {
692 lw = GC_size_map[lb];
694 lw = ALIGNED_WORDS(lb);
696 opp = &(GC_eobjfreelist[lw]);
698 if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
700 op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
702 lw = GC_size_map[lb]; /* May have been uninitialized. */
707 GC_words_allocd += lw;
711 op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
713 lw = BYTES_TO_WORDS(GC_size(op));
/* Descriptor goes in the last word, as GC_typed_mark_proc expects. */
716 ((word *)op)[lw - 1] = d;
/* Public API: allocate an array of n elements of lb bytes, each        */
/* described by d.  Builds an array descriptor and dispatches on its    */
/* kind: SIMPLE delegates to GC_malloc_explicitly_typed; LEAF stores a  */
/* LeafDescriptor inside the object; COMPLEX stores a pointer to a      */
/* heap-allocated complex descriptor and registers a disappearing link  */
/* so the descriptor slot is cleared if it could be collected.          */
/* NOTE(review): this definition is truncated at the end of the visible */
/* region, and interior lines (switch header, case labels, locking,     */
/* #else/#endif) appear to have been dropped in extraction.             */
720 #if defined(__STDC__) || defined(__cplusplus)
721 void * GC_calloc_explicitly_typed(size_t n,
725 char * GC_calloc_explicitly_typed(n, lb, d)
732 register ptr_t * opp;
734 GC_descr simple_descr;
735 complex_descriptor *complex_descr;
736 register int descr_type;
737 struct LeafDescriptor leaf;
740 descr_type = GC_make_array_descriptor((word)n, (word)lb, d,
741 &simple_descr, &complex_descr, &leaf);
743 case NO_MEM: return(0);
744 case SIMPLE: return(GC_malloc_explicitly_typed(n*lb, simple_descr));
/* LEAF: reserve space for the inline LeafDescriptor as well. */
747 lb += sizeof(struct LeafDescriptor) + TYPD_EXTRA_BYTES;
751 lb += TYPD_EXTRA_BYTES;
754 if( SMALL_OBJ(lb) ) {
756 lw = GC_size_map[lb];
758 lw = ALIGNED_WORDS(lb);
760 opp = &(GC_arobjfreelist[lw]);
762 if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
764 op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
765 if (0 == op) return(0);
767 lw = GC_size_map[lb]; /* May have been uninitialized. */
772 GC_words_allocd += lw;
776 op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
777 if (0 == op) return(0);
778 lw = BYTES_TO_WORDS(GC_size(op));
780 if (descr_type == LEAF) {
781 /* Set up the descriptor inside the object itself. */
782 VOLATILE struct LeafDescriptor * lp =
783 (struct LeafDescriptor *)
785 + lw - (BYTES_TO_WORDS(sizeof(struct LeafDescriptor)) + 1));
787 lp -> ld_tag = LEAF_TAG;
788 lp -> ld_size = leaf.ld_size;
789 lp -> ld_nelements = leaf.ld_nelements;
790 lp -> ld_descriptor = leaf.ld_descriptor;
/* Last word points at the inline leaf, as GC_array_mark_proc reads. */
791 ((VOLATILE word *)op)[lw - 1] = (word)lp;
/* COMPLEX: descriptor lives on the GC heap; snapshot the failure   */
/* counter to detect whether the disappearing-link registration     */
/* below ran out of memory.                                         */
793 extern unsigned GC_finalization_failures;
794 unsigned ff = GC_finalization_failures;
796 ((word *)op)[lw - 1] = (word)complex_descr;
797 /* Make sure the descriptor is cleared once there is any danger */
798 /* it may have been collected. */
800 GC_general_register_disappearing_link((GC_PTR *)
803 if (ff != GC_finalization_failures) {
804 /* Couldn't register it due to lack of memory. Punt. */
805 /* This will probably fail too, but gives the recovery code */
807 return(GC_malloc(n*lb));