Searched refs:bm (Results 1 – 25 of 37) sorted by relevance

/drivers/staging/comedi/
comedi_buf.c
33 struct comedi_buf_map *bm = in comedi_buf_map_kref_release() local
38 if (bm->page_list) { in comedi_buf_map_kref_release()
39 for (i = 0; i < bm->n_pages; i++) { in comedi_buf_map_kref_release()
40 buf = &bm->page_list[i]; in comedi_buf_map_kref_release()
43 if (bm->dma_dir != DMA_NONE) { in comedi_buf_map_kref_release()
45 dma_free_coherent(bm->dma_hw_dev, in comedi_buf_map_kref_release()
54 vfree(bm->page_list); in comedi_buf_map_kref_release()
56 if (bm->dma_dir != DMA_NONE) in comedi_buf_map_kref_release()
57 put_device(bm->dma_hw_dev); in comedi_buf_map_kref_release()
58 kfree(bm); in comedi_buf_map_kref_release()
[all …]
comedi_internal.h
31 void comedi_buf_map_get(struct comedi_buf_map *bm);
32 int comedi_buf_map_put(struct comedi_buf_map *bm);
33 int comedi_buf_map_access(struct comedi_buf_map *bm, unsigned long offset,
comedi_fops.c
2154 struct comedi_buf_map *bm; in comedi_vm_open() local
2156 bm = area->vm_private_data; in comedi_vm_open()
2157 comedi_buf_map_get(bm); in comedi_vm_open()
2162 struct comedi_buf_map *bm; in comedi_vm_close() local
2164 bm = area->vm_private_data; in comedi_vm_close()
2165 comedi_buf_map_put(bm); in comedi_vm_close()
2171 struct comedi_buf_map *bm = vma->vm_private_data; in comedi_vm_access() local
2179 return comedi_buf_map_access(bm, offset, buf, len, write); in comedi_vm_access()
2194 struct comedi_buf_map *bm = NULL; in comedi_mmap() local
2250 bm = comedi_buf_map_from_subdev_get(s); in comedi_mmap()
[all …]
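
The comedi hits all revolve around the reference-counted buffer map: comedi_buf_map_kref_release() walks bm->page_list, frees each page (via dma_free_coherent() when bm->dma_dir != DMA_NONE), then releases the page list and the map itself, while the vm_ops hooks in comedi_fops.c take and drop references on mmap open/close. A minimal sketch of that kref pattern, using hypothetical names (my_buf_map, my_buf_map_release) rather than the comedi ones:

/* Sketch only: a refcounted page-list map torn down from kref_put(),
 * loosely mirroring comedi_buf_map; names are hypothetical. */
#include <linux/kref.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

struct my_buf_page {
	void *virt_addr;
};

struct my_buf_map {
	struct kref refcount;
	struct my_buf_page *page_list;
	unsigned int n_pages;
};

static void my_buf_map_release(struct kref *kref)
{
	struct my_buf_map *bm = container_of(kref, struct my_buf_map, refcount);
	unsigned int i;

	if (bm->page_list) {
		for (i = 0; i < bm->n_pages; i++)
			free_page((unsigned long)bm->page_list[i].virt_addr);
		vfree(bm->page_list);
	}
	kfree(bm);
}

/* Callers pair these, as comedi_vm_open()/comedi_vm_close() do: */
static void my_buf_map_get(struct my_buf_map *bm)
{
	kref_get(&bm->refcount);
}

static int my_buf_map_put(struct my_buf_map *bm)
{
	return kref_put(&bm->refcount, my_buf_map_release);
}
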
/drivers/md/persistent-data/
dm-block-manager.c
384 struct dm_block_manager *bm; in dm_block_manager_create() local
386 bm = kmalloc(sizeof(*bm), GFP_KERNEL); in dm_block_manager_create()
387 if (!bm) { in dm_block_manager_create()
392 bm->bufio = dm_bufio_client_create(bdev, block_size, max_held_per_thread, in dm_block_manager_create()
396 if (IS_ERR(bm->bufio)) { in dm_block_manager_create()
397 r = PTR_ERR(bm->bufio); in dm_block_manager_create()
398 kfree(bm); in dm_block_manager_create()
402 bm->read_only = false; in dm_block_manager_create()
404 return bm; in dm_block_manager_create()
411 void dm_block_manager_destroy(struct dm_block_manager *bm) in dm_block_manager_destroy() argument
[all …]
dm-block-manager.h
37 void dm_block_manager_destroy(struct dm_block_manager *bm);
39 unsigned dm_bm_block_size(struct dm_block_manager *bm);
40 dm_block_t dm_bm_nr_blocks(struct dm_block_manager *bm);
73 int dm_bm_read_lock(struct dm_block_manager *bm, dm_block_t b,
77 int dm_bm_write_lock(struct dm_block_manager *bm, dm_block_t b,
85 int dm_bm_read_try_lock(struct dm_block_manager *bm, dm_block_t b,
93 int dm_bm_write_lock_zero(struct dm_block_manager *bm, dm_block_t b,
108 int dm_bm_flush(struct dm_block_manager *bm);
113 void dm_bm_prefetch(struct dm_block_manager *bm, dm_block_t b);
126 bool dm_bm_is_read_only(struct dm_block_manager *bm);
[all …]
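
dm-block-manager.h above declares the core persistent-data API: lifetime (create/destroy), geometry queries (dm_bm_block_size(), dm_bm_nr_blocks()), per-block read/write locking, flush and prefetch. A hedged sketch of the usual client pattern, taking a read lock on one block, peeking at its data and unlocking again (block 0, the NULL validator and the include path are arbitrary choices for illustration):

#include "dm-block-manager.h"

/* Sketch: read one block and report its first byte. Passing a NULL
 * validator skips any checksum/consistency check on the block. */
static int peek_first_byte(struct dm_block_manager *bm, u8 *out)
{
	struct dm_block *b;
	int r;

	r = dm_bm_read_lock(bm, 0, NULL, &b);
	if (r)
		return r;

	*out = *(u8 *)dm_block_data(b);

	dm_bm_unlock(b);
	return 0;
}
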
dm-transaction-manager.c
60 static void prefetch_issue(struct prefetch_set *p, struct dm_block_manager *bm) in prefetch_issue() argument
68 dm_bm_prefetch(bm, p->blocks[i]); in prefetch_issue()
92 struct dm_block_manager *bm; member
160 static struct dm_transaction_manager *dm_tm_create(struct dm_block_manager *bm, in dm_tm_create() argument
172 tm->bm = bm; in dm_tm_create()
218 return dm_bm_flush(tm->bm); in dm_tm_pre_commit()
230 return dm_bm_flush(tm->bm); in dm_tm_commit()
248 r = dm_bm_write_lock_zero(tm->bm, new_block, v, result); in dm_tm_new_block()
279 r = dm_bm_read_lock(tm->bm, orig, v, &orig_block); in __shadow_block()
290 r = dm_bm_write_lock_zero(tm->bm, new, v, result); in __shadow_block()
[all …]
dm-transaction-manager.h
128 int dm_tm_create_with_sm(struct dm_block_manager *bm, dm_block_t sb_location,
132 int dm_tm_open_with_sm(struct dm_block_manager *bm, dm_block_t sb_location,
/drivers/uwb/
drp.c
275 bitmap_and(mv->companion_mas.bm, rsv->mas.bm, conflicting_mas->bm, UWB_NUM_MAS); in handle_conflict_normal()
335 bitmap_andnot(mv->companion_mas.bm, rsv->mas.bm, in handle_conflict_expanding()
336 conflicting_mas->bm, UWB_NUM_MAS); in handle_conflict_expanding()
355 if (bitmap_intersects(rsv->mas.bm, conflicting_mas->bm, in uwb_drp_handle_conflict_rsv()
361 if (bitmap_intersects(mv->companion_mas.bm, in uwb_drp_handle_conflict_rsv()
362 conflicting_mas->bm, UWB_NUM_MAS)) { in uwb_drp_handle_conflict_rsv()
368 } else if (bitmap_intersects(rsv->mas.bm, conflicting_mas->bm, in uwb_drp_handle_conflict_rsv()
404 if (!bitmap_equal(rsv->mas.bm, mas->bm, UWB_NUM_MAS)) { in uwb_drp_process_target_accepted()
410 if (!bitmap_equal(rsv->mas.bm, mas->bm, UWB_NUM_MAS)) { in uwb_drp_process_target_accepted()
421 bitmap_copy(mv->companion_mas.bm, mas->bm, in uwb_drp_process_target_accepted()
[all …]
allocator.c
27 unsigned char *bm = ai->bm; in uwb_rsv_fill_column_alloc() local
37 if (bm[col * UWB_MAS_PER_ZONE + mas] == 0) { in uwb_rsv_fill_column_alloc()
48 bm[col * UWB_MAS_PER_ZONE + mas] = c; in uwb_rsv_fill_column_alloc()
57 unsigned char *bm = ai->bm; in uwb_rsv_fill_row_alloc() local
73 if (bm[col * UWB_NUM_ZONES + mas] != UWB_RSV_MAS_NOT_AVAIL) { in uwb_rsv_fill_row_alloc()
74 bm[col * UWB_NUM_ZONES + mas] = c; in uwb_rsv_fill_row_alloc()
206 unsigned char *bm = ai->bm; in get_row_descriptors() local
214 if (bm[col * UWB_NUM_ZONES + mas] == UWB_RSV_MAS_NOT_AVAIL) { in get_row_descriptors()
223 static void uwb_rsv_fill_column_info(unsigned char *bm, int column, struct uwb_rsv_col_info *rci) in uwb_rsv_fill_column_info() argument
236 if (!bm[column * UWB_NUM_ZONES + mas]) { in uwb_rsv_fill_column_info()
[all …]
drp-avail.c
63 bitmap_and(avail->bm, rc->drp_avail.global, rc->drp_avail.local, UWB_NUM_MAS); in uwb_drp_available()
64 bitmap_and(avail->bm, avail->bm, rc->drp_avail.pending, UWB_NUM_MAS); in uwb_drp_available()
79 if (!bitmap_subset(mas->bm, avail.bm, UWB_NUM_MAS)) in uwb_drp_avail_reserve_pending()
82 bitmap_andnot(rc->drp_avail.pending, rc->drp_avail.pending, mas->bm, UWB_NUM_MAS); in uwb_drp_avail_reserve_pending()
93 bitmap_or(rc->drp_avail.pending, rc->drp_avail.pending, mas->bm, UWB_NUM_MAS); in uwb_drp_avail_reserve()
94 bitmap_andnot(rc->drp_avail.local, rc->drp_avail.local, mas->bm, UWB_NUM_MAS); in uwb_drp_avail_reserve()
105 bitmap_or(rc->drp_avail.local, rc->drp_avail.local, mas->bm, UWB_NUM_MAS); in uwb_drp_avail_release()
106 bitmap_or(rc->drp_avail.pending, rc->drp_avail.pending, mas->bm, UWB_NUM_MAS); in uwb_drp_avail_release()
121 bitmap_and(avail.bm, rc->drp_avail.global, rc->drp_avail.local, UWB_NUM_MAS); in uwb_drp_avail_ie_update()
rsv.c
340 bitmap_andnot(rsv->mas.bm, rsv->mas.bm, mv->companion_mas.bm, UWB_NUM_MAS); in uwb_rsv_set_state()
360 bitmap_or(rsv->mas.bm, rsv->mas.bm, mv->companion_mas.bm, UWB_NUM_MAS); in uwb_rsv_set_state()
366 bitmap_andnot(mv->companion_mas.bm, rsv->mas.bm, mv->final_mas.bm, UWB_NUM_MAS); in uwb_rsv_set_state()
370 bitmap_copy(rsv->mas.bm, mv->final_mas.bm, UWB_NUM_MAS); in uwb_rsv_set_state()
635 if (!bitmap_equal(rsv->mas.bm, mv->final_mas.bm, UWB_NUM_MAS)) { in uwb_rsv_try_move()
637 bitmap_andnot(mv->companion_mas.bm, mv->final_mas.bm, rsv->mas.bm, UWB_NUM_MAS); in uwb_rsv_try_move()
664 bitmap_or(mas.bm, mas.bm, rsv->mas.bm, UWB_NUM_MAS); in uwb_rsv_handle_drp_avail_change()
804 bitmap_zero(mas->bm, UWB_NUM_MAS); in uwb_rsv_get_usable_mas()
805 bitmap_andnot(mas->bm, rsv->mas.bm, rsv->rc->cnflt_alien_bitmap.bm, UWB_NUM_MAS); in uwb_rsv_get_usable_mas()
drp-ie.c
152 bitmap_copy(tmp_bmp, mas->bm, UWB_NUM_MAS); in uwb_drp_ie_from_bm()
269 void uwb_drp_ie_single_zone_to_bm(struct uwb_mas_bm *bm, u8 zone, u16 mas_bm) in uwb_drp_ie_single_zone_to_bm() argument
277 set_bit(zone * UWB_NUM_ZONES + mas, bm->bm); in uwb_drp_ie_single_zone_to_bm()
296 void uwb_drp_ie_to_bm(struct uwb_mas_bm *bm, const struct uwb_ie_drp *drp_ie) in uwb_drp_ie_to_bm() argument
305 bitmap_zero(bm->bm, UWB_NUM_MAS); in uwb_drp_ie_to_bm()
314 uwb_drp_ie_single_zone_to_bm(bm, zone, mas_bm); in uwb_drp_ie_to_bm()
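
Most of the UWB hits manipulate struct uwb_mas_bm, which wraps a bitmap of UWB_NUM_MAS medium access slots, so reservation arithmetic is expressed as bitmap_and()/bitmap_andnot()/bitmap_subset() calls like the ones above (allocator.c's ai->bm is the exception: a per-slot byte array). A small sketch of the same availability-masking idiom, with made-up names and the slot count assumed to mirror UWB_NUM_MAS:

#include <linux/bitmap.h>

#define MY_NUM_MAS 256	/* assumption: mirrors UWB_NUM_MAS */

/* Sketch: can a requested MAS set be satisfied from what is both
 * globally and locally available? (cf. uwb_drp_avail_reserve_pending) */
static bool mas_request_fits(const unsigned long *global,
			     const unsigned long *local,
			     const unsigned long *wanted)
{
	DECLARE_BITMAP(avail, MY_NUM_MAS);

	bitmap_and(avail, global, local, MY_NUM_MAS);
	return bitmap_subset(wanted, avail, MY_NUM_MAS);
}
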
/drivers/power/supply/
abx500_chargalg.c
255 struct abx500_bm_data *bm; member
362 di->bm->bat_type[di->bm->batt_id].normal_vol_lvl, in abx500_chargalg_check_charger_enable()
363 di->bm->bat_type[di->bm->batt_id].normal_cur_lvl); in abx500_chargalg_check_charger_enable()
367 di->bm->bat_type[di->bm->batt_id].normal_vol_lvl, in abx500_chargalg_check_charger_enable()
368 di->bm->bat_type[di->bm->batt_id].normal_cur_lvl); in abx500_chargalg_check_charger_enable()
447 timer_expiration = di->bm->main_safety_tmr_h; in abx500_chargalg_start_safety_timer()
451 timer_expiration = di->bm->usb_safety_tmr_h; in abx500_chargalg_start_safety_timer()
760 if (di->batt_data.temp > (di->bm->temp_low + di->t_hyst_norm) && in abx500_chargalg_check_temp()
761 di->batt_data.temp < (di->bm->temp_high - di->t_hyst_norm)) { in abx500_chargalg_check_temp()
768 if (((di->batt_data.temp >= di->bm->temp_high) && in abx500_chargalg_check_temp()
[all …]
ab8500_btemp.c
101 struct abx500_bm_data *bm; member
153 if (di->bm->adc_therm == ABx500_ADC_THERM_BATCTRL) { in ab8500_btemp_batctrl_volt_to_res()
159 - di->bm->gnd_lift_resistance * inst_curr) in ab8500_btemp_batctrl_volt_to_res()
215 if (di->bm->adc_therm == ABx500_ADC_THERM_BATCTRL && enable) { in ab8500_btemp_curr_source_enable()
260 } else if (di->bm->adc_therm == ABx500_ADC_THERM_BATCTRL && !enable) { in ab8500_btemp_curr_source_enable()
505 id = di->bm->batt_id; in ab8500_btemp_measure_temp()
507 if (di->bm->adc_therm == ABx500_ADC_THERM_BATCTRL && in ab8500_btemp_measure_temp()
522 di->bm->bat_type[id].r_to_t_tbl, in ab8500_btemp_measure_temp()
523 di->bm->bat_type[id].n_temp_tbl_elements, rbat); in ab8500_btemp_measure_temp()
539 di->bm->bat_type[id].r_to_t_tbl, in ab8500_btemp_measure_temp()
[all …]
ab8500_bmdata.c
557 struct abx500_bm_data *bm) in ab8500_bm_of_probe() argument
578 bm->no_maintenance = true; in ab8500_bm_of_probe()
579 bm->chg_unknown_bat = true; in ab8500_bm_of_probe()
580 bm->bat_type[BATTERY_UNKNOWN].charge_full_design = 2600; in ab8500_bm_of_probe()
581 bm->bat_type[BATTERY_UNKNOWN].termination_vol = 4150; in ab8500_bm_of_probe()
582 bm->bat_type[BATTERY_UNKNOWN].recharge_cap = 95; in ab8500_bm_of_probe()
583 bm->bat_type[BATTERY_UNKNOWN].normal_cur_lvl = 520; in ab8500_bm_of_probe()
584 bm->bat_type[BATTERY_UNKNOWN].normal_vol_lvl = 4200; in ab8500_bm_of_probe()
593 bm->n_btypes = 4; in ab8500_bm_of_probe()
594 bm->bat_type = bat_type_ext_thermistor; in ab8500_bm_of_probe()
[all …]
ab8500_fg.c
228 struct abx500_bm_data *bm; member
366 if (curr > -di->bm->fg_params->high_curr_threshold) in ab8500_fg_is_low_curr()
675 (1000 * di->bm->fg_res); in ab8500_fg_inst_curr_finalize()
799 (100 * di->bm->fg_res); in ab8500_fg_acc_cur_work()
808 (1000 * di->bm->fg_res * (di->fg_samples / 4)); in ab8500_fg_acc_cur_work()
817 di->bm->fg_res, di->fg_samples, val, di->accu_charge); in ab8500_fg_acc_cur_work()
862 tbl = di->bm->bat_type[di->bm->batt_id].v_to_cap_tbl, in ab8500_fg_volt_to_capacity()
863 tbl_size = di->bm->bat_type[di->bm->batt_id].n_v_cap_tbl_elements; in ab8500_fg_volt_to_capacity()
914 tbl = di->bm->bat_type[di->bm->batt_id].batres_tbl; in ab8500_fg_battery_resistance()
915 tbl_size = di->bm->bat_type[di->bm->batt_id].n_batres_tbl_elements; in ab8500_fg_battery_resistance()
[all …]
ab8500_charger.c
289 struct abx500_bm_data *bm; member
1009 if (curr < di->bm->chg_output_curr[0]) in ab8500_current_to_regval()
1012 for (i = 0; i < di->bm->n_chg_out_curr; i++) { in ab8500_current_to_regval()
1013 if (curr < di->bm->chg_output_curr[i]) in ab8500_current_to_regval()
1018 i = di->bm->n_chg_out_curr - 1; in ab8500_current_to_regval()
1019 if (curr == di->bm->chg_output_curr[i]) in ab8500_current_to_regval()
1029 if (curr < di->bm->chg_input_curr[0]) in ab8500_vbus_in_curr_to_regval()
1032 for (i = 0; i < di->bm->n_chg_in_curr; i++) { in ab8500_vbus_in_curr_to_regval()
1033 if (curr < di->bm->chg_input_curr[i]) in ab8500_vbus_in_curr_to_regval()
1038 i = di->bm->n_chg_in_curr - 1; in ab8500_vbus_in_curr_to_regval()
[all …]
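
In the ab8500/abx500 charging drivers bm is not a bitmap at all: it points to struct abx500_bm_data, the battery-management platform data, and most hits are table lookups of the form di->bm->bat_type[di->bm->batt_id].normal_vol_lvl. A hedged sketch of that lookup shape with simplified stand-in structures mirroring a few of the fields visible above:

/* Sketch only: simplified stand-ins for abx500_bm_data and its
 * bat_type table; this is not the driver's real layout. */
struct my_battery_type {
	int normal_vol_lvl;	/* normal charge voltage, mV */
	int normal_cur_lvl;	/* normal charge current, mA */
};

struct my_bm_data {
	int batt_id;				/* detected battery index */
	const struct my_battery_type *bat_type;	/* table of known batteries */
};

static int normal_charge_voltage(const struct my_bm_data *bm)
{
	return bm->bat_type[bm->batt_id].normal_vol_lvl;
}
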
/drivers/block/drbd/
drbd_bitmap.c
499 unsigned long *p_addr, *bm; in bm_clear_surplus() local
512 bm = p_addr + (tmp/BITS_PER_LONG); in bm_clear_surplus()
518 cleared = hweight_long(*bm & ~mask); in bm_clear_surplus()
519 *bm &= mask; in bm_clear_surplus()
520 bm++; in bm_clear_surplus()
523 if (BITS_PER_LONG == 32 && ((bm - p_addr) & 1) == 1) { in bm_clear_surplus()
526 cleared += hweight_long(*bm); in bm_clear_surplus()
527 *bm = 0; in bm_clear_surplus()
536 unsigned long *p_addr, *bm; in bm_set_surplus() local
548 bm = p_addr + (tmp/BITS_PER_LONG); in bm_set_surplus()
[all …]
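
The drbd_bitmap.c hits are about the tail of the bitmap: when the number of tracked bits is not a multiple of BITS_PER_LONG, bm_clear_surplus()/bm_set_surplus() mask the spare bits in the last word so they cannot skew the hweight-based accounting. A minimal sketch of the clearing side, assuming the bitmap is a plain array of unsigned long:

#include <linux/bitops.h>

/* Sketch (cf. bm_clear_surplus()): clear bits beyond 'nbits' in the
 * last word and return how many set bits were discarded. */
static unsigned int clear_surplus_bits(unsigned long *bitmap,
				       unsigned long nbits)
{
	unsigned long *last = bitmap + nbits / BITS_PER_LONG;
	unsigned long mask = (1UL << (nbits & (BITS_PER_LONG - 1))) - 1;
	unsigned int cleared = 0;

	if (nbits & (BITS_PER_LONG - 1)) {	/* partial last word? */
		cleared = hweight_long(*last & ~mask);
		*last &= mask;
	}
	return cleared;
}
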
/drivers/irqchip/
irq-gic-v2m.c
71 unsigned long *bm; /* MSI vector bitmap */ member
169 __clear_bit(pos, v2m->bm); in gicv2m_unalloc_msi()
181 offset = find_first_zero_bit(tmp->bm, tmp->nr_spis); in gicv2m_irq_domain_alloc()
183 __set_bit(offset, tmp->bm); in gicv2m_irq_domain_alloc()
258 kfree(v2m->bm); in gicv2m_teardown()
366 v2m->bm = kzalloc(sizeof(long) * BITS_TO_LONGS(v2m->nr_spis), in gicv2m_init_one()
368 if (!v2m->bm) { in gicv2m_init_one()
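
irq-gic-v2m.c keeps one bit per SPI in a plain bitmap: allocated with kzalloc(sizeof(long) * BITS_TO_LONGS(nr_spis)), claimed via find_first_zero_bit() plus __set_bit(), and returned with __clear_bit(). A hedged sketch of that allocator idiom (serialization is left to the caller here, which is an assumption; the real driver does its own locking):

#include <linux/bitmap.h>
#include <linux/errno.h>
#include <linux/slab.h>

/* Sketch: a tiny vector allocator over a zero-initialised bitmap.
 * Caller is assumed to serialise alloc/free externally. */
struct my_vec_pool {
	unsigned long *bm;
	unsigned int nr_vecs;
};

static int my_vec_pool_init(struct my_vec_pool *p, unsigned int nr_vecs)
{
	p->bm = kcalloc(BITS_TO_LONGS(nr_vecs), sizeof(long), GFP_KERNEL);
	if (!p->bm)
		return -ENOMEM;
	p->nr_vecs = nr_vecs;
	return 0;
}

static int my_vec_alloc(struct my_vec_pool *p)
{
	unsigned int offset = find_first_zero_bit(p->bm, p->nr_vecs);

	if (offset >= p->nr_vecs)
		return -ENOSPC;
	__set_bit(offset, p->bm);
	return offset;
}

static void my_vec_free(struct my_vec_pool *p, unsigned int offset)
{
	__clear_bit(offset, p->bm);
}
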
/drivers/md/
dm-cache-metadata.c
108 struct dm_block_manager *bm; member
239 return dm_bm_read_lock(cmd->bm, CACHE_SUPERBLOCK_LOCATION, in superblock_read_lock()
246 return dm_bm_write_lock_zero(cmd->bm, CACHE_SUPERBLOCK_LOCATION, in superblock_lock_zero()
253 return dm_bm_write_lock(cmd->bm, CACHE_SUPERBLOCK_LOCATION, in superblock_lock()
259 static int __superblock_all_zeroes(struct dm_block_manager *bm, bool *result) in __superblock_all_zeroes() argument
265 unsigned sb_block_size = dm_bm_block_size(bm) / sizeof(__le64); in __superblock_all_zeroes()
270 r = dm_bm_read_lock(bm, CACHE_SUPERBLOCK_LOCATION, NULL, &b); in __superblock_all_zeroes()
393 r = dm_tm_create_with_sm(cmd->bm, CACHE_SUPERBLOCK_LOCATION, in __format_metadata()
492 r = dm_tm_open_with_sm(cmd->bm, CACHE_SUPERBLOCK_LOCATION, in __open_metadata()
521 r = __superblock_all_zeroes(cmd->bm, &unformatted); in __open_or_format_metadata()
[all …]
dm-thin-metadata.c
149 struct dm_block_manager *bm; member
373 return dm_bm_write_lock_zero(pmd->bm, THIN_SUPERBLOCK_LOCATION, in superblock_lock_zero()
380 return dm_bm_write_lock(pmd->bm, THIN_SUPERBLOCK_LOCATION, in superblock_lock()
384 static int __superblock_all_zeroes(struct dm_block_manager *bm, int *result) in __superblock_all_zeroes() argument
390 unsigned block_size = dm_bm_block_size(bm) / sizeof(__le64); in __superblock_all_zeroes()
395 r = dm_bm_read_lock(bm, THIN_SUPERBLOCK_LOCATION, NULL, &b); in __superblock_all_zeroes()
533 r = dm_tm_create_with_sm(pmd->bm, THIN_SUPERBLOCK_LOCATION, in __format_metadata()
617 r = dm_bm_read_lock(pmd->bm, THIN_SUPERBLOCK_LOCATION, in __open_metadata()
639 r = dm_tm_open_with_sm(pmd->bm, THIN_SUPERBLOCK_LOCATION, in __open_metadata()
683 r = __superblock_all_zeroes(pmd->bm, &unformatted); in __open_or_format_metadata()
[all …]
dm-era-target.c
261 struct dm_block_manager *bm; member
302 return dm_bm_read_lock(md->bm, SUPERBLOCK_LOCATION, in superblock_read_lock()
309 return dm_bm_write_lock_zero(md->bm, SUPERBLOCK_LOCATION, in superblock_lock_zero()
316 return dm_bm_write_lock(md->bm, SUPERBLOCK_LOCATION, in superblock_lock()
321 static int superblock_all_zeroes(struct dm_block_manager *bm, bool *result) in superblock_all_zeroes() argument
327 unsigned sb_block_size = dm_bm_block_size(bm) / sizeof(__le64); in superblock_all_zeroes()
332 r = dm_bm_read_lock(bm, SUPERBLOCK_LOCATION, NULL, &b); in superblock_all_zeroes()
433 r = dm_tm_create_with_sm(md->bm, SUPERBLOCK_LOCATION, in create_fresh_metadata()
566 r = dm_tm_open_with_sm(md->bm, SUPERBLOCK_LOCATION, in open_metadata()
601 r = superblock_all_zeroes(md->bm, &unformatted); in open_or_format_metadata()
[all …]
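
dm-cache, dm-thin and dm-era all open their metadata the same way: read-lock the superblock location, scan it as __le64 words to decide whether the device is still unformatted, then format or open it via dm_tm_create_with_sm()/dm_tm_open_with_sm(). A hedged sketch of the all-zeroes probe (the superblock is assumed to live at block 0, and the include path is illustrative):

#include "persistent-data/dm-block-manager.h"

/* Sketch of the __superblock_all_zeroes() idiom shared above:
 * *result is set true iff the superblock block is entirely blank. */
static int my_superblock_all_zeroes(struct dm_block_manager *bm, bool *result)
{
	struct dm_block *b;
	__le64 *data;
	unsigned int i, nr_words = dm_bm_block_size(bm) / sizeof(__le64);
	int r;

	r = dm_bm_read_lock(bm, 0, NULL, &b);
	if (r)
		return r;

	data = dm_block_data(b);
	*result = true;
	for (i = 0; i < nr_words; i++) {
		if (data[i]) {
			*result = false;
			break;
		}
	}

	dm_bm_unlock(b);
	return 0;
}
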
/drivers/w1/
w1_int.c
233 void w1_remove_master_device(struct w1_bus_master *bm) in w1_remove_master_device() argument
241 if (dev->bus_master->data == bm->data) { in w1_remove_master_device()
/drivers/input/
input.c
54 unsigned long *bm, unsigned int max) in is_event_supported() argument
56 return code <= max && test_bit(code, bm); in is_event_supported()
1326 char name, unsigned long *bm, in input_print_modalias_bits() argument
1333 if (bm[BIT_WORD(i)] & BIT_MASK(i)) in input_print_modalias_bits()
1470 #define INPUT_DEV_CAP_ATTR(ev, bm) \ argument
1471 static ssize_t input_dev_show_cap_##bm(struct device *dev, \
1477 input_dev->bm##bit, ev##_MAX, \
1481 static DEVICE_ATTR(bm, S_IRUGO, input_dev_show_cap_##bm, NULL)
1578 #define INPUT_ADD_HOTPLUG_BM_VAR(name, bm, max) \ argument
1580 int err = input_add_uevent_bm_var(env, name, bm, max); \
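
input.c uses bm as a capability bitmap: is_event_supported() is just a bounds check plus test_bit(), and the INPUT_DEV_CAP_ATTR()/INPUT_ADD_HOTPLUG_BM_VAR() macros stamp out the per-bitmap sysfs and uevent plumbing. A one-function sketch of the test_bit() check, mirroring the line-54 hit:

#include <linux/bitops.h>
#include <linux/types.h>

/* Sketch of is_event_supported(): a code is supported when it is in
 * range and its bit is set in the capability bitmap. */
static bool my_event_supported(unsigned int code,
			       const unsigned long *bm, unsigned int max)
{
	return code <= max && test_bit(code, bm);
}
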
/drivers/gpu/drm/imx/
imx-ldb.c
507 const char *bm; in of_get_bus_format() local
511 ret = of_property_read_string(np, "fsl,data-mapping", &bm); in of_get_bus_format()
518 if (!strcasecmp(bm, imx_ldb_bit_mappings[i].mapping) && in of_get_bus_format()
523 dev_err(dev, "invalid data mapping: %d-bit \"%s\"\n", datawidth, bm); in of_get_bus_format()
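
The imx-ldb hit reads the fsl,data-mapping string from the device tree and matches it case-insensitively against a table of known LVDS bit mappings (imx_ldb_bit_mappings). A hedged sketch of that lookup shape with a simplified table; the entries and error handling below are illustrative, not the driver's exact code:

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/of.h>
#include <linux/string.h>

/* Sketch: map a DT string property to an index in a small table. */
static const char * const my_data_mappings[] = { "spwg", "jeida" };

static int my_get_bus_format(struct device_node *np)
{
	const char *bm;
	unsigned int i;
	int ret;

	ret = of_property_read_string(np, "fsl,data-mapping", &bm);
	if (ret < 0)
		return ret;

	for (i = 0; i < ARRAY_SIZE(my_data_mappings); i++)
		if (!strcasecmp(bm, my_data_mappings[i]))
			return i;

	return -EINVAL;	/* unknown mapping string */
}
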
