Home
last modified time | relevance | path

Searched refs: eaddr (Results 1 – 25 of 46) sorted by relevance

12

/kernel/liteos_a/testsuites/unittest/net/resolv/full/
Dnet_resolv_test_005.cpp37 struct ether_addr addr, *eaddr = &addr; in EtherLineTest() local
41 ret = ether_line("localhost 01:02:03:04:05:06", eaddr, buf); in EtherLineTest()
44 ret = ether_line("01:02:03:04:05:06 localhost", eaddr, buf); in EtherLineTest()
48 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], 0x01, eaddr->ether_addr_octet[0]); in EtherLineTest()
49 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], 0x02, eaddr->ether_addr_octet[1]); in EtherLineTest()
50 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], 0x03, eaddr->ether_addr_octet[2]); in EtherLineTest()
51 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[3], 0x04, eaddr->ether_addr_octet[3]); in EtherLineTest()
52 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[4], 0x05, eaddr->ether_addr_octet[4]); in EtherLineTest()
53 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[5], 0x06, eaddr->ether_addr_octet[5]); in EtherLineTest()
Dnet_resolv_test_004.cpp48 struct ether_addr addr, *eaddr = &addr; in EtherHosttonTest() local
49 int ret = ether_hostton("localhost", eaddr); in EtherHosttonTest()
52 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], 0x00, eaddr->ether_addr_octet[0]); in EtherHosttonTest()
53 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], 0x00, eaddr->ether_addr_octet[1]); in EtherHosttonTest()
54 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], 0x00, eaddr->ether_addr_octet[2]); in EtherHosttonTest()
55 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[3], 0x00, eaddr->ether_addr_octet[3]); in EtherHosttonTest()
56 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[4], 0x00, eaddr->ether_addr_octet[4]); in EtherHosttonTest()
57 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[5], 0x00, eaddr->ether_addr_octet[5]); in EtherHosttonTest()
Dnet_resolv_test_008.cpp49 struct ether_addr addr = {{0,0,0,0,0,0}}, *eaddr = &addr; in EtherNtohostTest() local
51 int ret = ether_ntohost(buf, eaddr); in EtherNtohostTest()
/kernel/liteos_a/testsuites/unittest/net/resolv/smoke/
Dnet_resolv_test_002.cpp39 struct ether_addr *eaddr = ether_aton("01::EF"); in EtherAtonTest() local
41 ICUNIT_ASSERT_EQUAL(eaddr, NULL, -1); in EtherAtonTest()
51 eaddr = ether_aton(mac_addr); in EtherAtonTest()
52 ICUNIT_ASSERT_NOT_EQUAL(eaddr, NULL, -1); in EtherAtonTest()
54 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], r[0], eaddr->ether_addr_octet[0]); in EtherAtonTest()
55 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], r[1], eaddr->ether_addr_octet[1]); in EtherAtonTest()
56 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], r[2], eaddr->ether_addr_octet[2]); // 2: compare r… in EtherAtonTest()
57 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[3], r[3], eaddr->ether_addr_octet[3]); // 3: compare r… in EtherAtonTest()
58 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[4], r[4], eaddr->ether_addr_octet[4]); // 4: compare r… in EtherAtonTest()
59 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[5], r[5], eaddr->ether_addr_octet[5]); // 5: compare r… in EtherAtonTest()
Dnet_resolv_test_003.cpp40 struct ether_addr *eaddr = ether_aton_r("::01:EF", &addr); in EtherAtonrTest() local
42 ICUNIT_ASSERT_EQUAL(eaddr, NULL, -1); in EtherAtonrTest()
52 eaddr = ether_aton_r(mac_addr, &addr); in EtherAtonrTest()
54 ICUNIT_ASSERT_EQUAL(eaddr, &addr, -1); in EtherAtonrTest()
55 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[0], r[0], eaddr->ether_addr_octet[0]); in EtherAtonrTest()
56 ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[1], r[1], eaddr->ether_addr_octet[1]); in EtherAtonrTest()
57 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[2], r[2], eaddr->ether_addr_octet[2]); // 2: compare r… in EtherAtonrTest()
58 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[3], r[3], eaddr->ether_addr_octet[3]); // 3: compare r… in EtherAtonrTest()
59 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[4], r[4], eaddr->ether_addr_octet[4]); // 4: compare r… in EtherAtonrTest()
60 …ICUNIT_ASSERT_EQUAL(eaddr->ether_addr_octet[5], r[5], eaddr->ether_addr_octet[5]); // 5: compare r… in EtherAtonrTest()
Dnet_resolv_test_006.cpp39 struct ether_addr addr, *eaddr = &addr; in EtherNtoaTest() local
44 eaddr->ether_addr_octet[i] = r[i]; in EtherNtoaTest()
46 char *buf = ether_ntoa(eaddr); in EtherNtoaTest()
Dnet_resolv_test_007.cpp38 struct ether_addr addr, *eaddr = &addr; in EtherNtoarTest() local
43 eaddr->ether_addr_octet[i] = r[i]; in EtherNtoarTest()
45 char buf[100], *p = ether_ntoa_r(eaddr, buf); in EtherNtoarTest()
/kernel/linux/linux-5.10/arch/powerpc/kvm/
Dbook3s_32_mmu.c69 static int kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr,
75 static u32 find_sr(struct kvm_vcpu *vcpu, gva_t eaddr) in find_sr() argument
77 return kvmppc_get_sr(vcpu, (eaddr >> 28) & 0xf); in find_sr()
80 static u64 kvmppc_mmu_book3s_32_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_32_ea_to_vp() argument
86 if (!kvmppc_mmu_book3s_32_xlate_bat(vcpu, eaddr, &pte, data, false)) in kvmppc_mmu_book3s_32_ea_to_vp()
89 kvmppc_mmu_book3s_32_esid_to_vsid(vcpu, eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_book3s_32_ea_to_vp()
90 return (((u64)eaddr >> 12) & 0xffff) | (vsid << 16); in kvmppc_mmu_book3s_32_ea_to_vp()
94 u32 sre, gva_t eaddr, in kvmppc_mmu_book3s_32_get_pteg() argument
101 page = (eaddr & 0x0FFFFFFF) >> 12; in kvmppc_mmu_book3s_32_get_pteg()
112 kvmppc_get_pc(vcpu), eaddr, vcpu_book3s->sdr1, pteg, in kvmppc_mmu_book3s_32_get_pteg()
[all …]
Dbook3s_64_mmu.c29 gva_t eaddr) in kvmppc_mmu_book3s_64_find_slbe() argument
32 u64 esid = GET_ESID(eaddr); in kvmppc_mmu_book3s_64_find_slbe()
33 u64 esid_1t = GET_ESID_1T(eaddr); in kvmppc_mmu_book3s_64_find_slbe()
49 eaddr, esid, esid_1t); in kvmppc_mmu_book3s_64_find_slbe()
73 static u64 kvmppc_slb_calc_vpn(struct kvmppc_slb *slb, gva_t eaddr) in kvmppc_slb_calc_vpn() argument
75 eaddr &= kvmppc_slb_offset_mask(slb); in kvmppc_slb_calc_vpn()
77 return (eaddr >> VPN_SHIFT) | in kvmppc_slb_calc_vpn()
81 static u64 kvmppc_mmu_book3s_64_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_64_ea_to_vp() argument
86 slb = kvmppc_mmu_book3s_64_find_slbe(vcpu, eaddr); in kvmppc_mmu_book3s_64_ea_to_vp()
90 return kvmppc_slb_calc_vpn(slb, eaddr); in kvmppc_mmu_book3s_64_ea_to_vp()
[all …]
Dtrace_pr.h39 __field( unsigned long, eaddr )
49 __entry->eaddr = orig_pte->eaddr;
57 __entry->flag_w, __entry->flag_x, __entry->eaddr,
70 __field( ulong, eaddr )
79 __entry->eaddr = pte->pte.eaddr;
88 __entry->host_vpn, __entry->pfn, __entry->eaddr,
99 __field( ulong, eaddr )
108 __entry->eaddr = pte->pte.eaddr;
117 __entry->host_vpn, __entry->pfn, __entry->eaddr,
Dbook3s_32_mmu_host.c59 asm volatile ("tlbie %0" : : "r" (pte->pte.eaddr) : "memory"); in kvmppc_mmu_invalidate_pte()
106 static u32 *kvmppc_mmu_get_pteg(struct kvm_vcpu *vcpu, u32 vsid, u32 eaddr, in kvmppc_mmu_get_pteg() argument
112 page = (eaddr & ~ESID_MASK) >> 12; in kvmppc_mmu_get_pteg()
138 u32 eaddr = orig_pte->eaddr; in kvmppc_mmu_map_page() local
158 vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_map_page()
161 kvmppc_mmu_map_segment(vcpu, eaddr); in kvmppc_mmu_map_page()
168 ((eaddr & ~ESID_MASK) >> VPN_SHIFT); in kvmppc_mmu_map_page()
176 pteg = kvmppc_mmu_get_pteg(vcpu, vsid, eaddr, primary); in kvmppc_mmu_map_page()
194 pteg0 = ((eaddr & 0x0fffffff) >> 22) | (vsid << 7) | PTE_V | in kvmppc_mmu_map_page()
243 orig_pte->eaddr, (ulong)pteg, vpn, in kvmppc_mmu_map_page()
[all …]
De500_mmu.c81 gva_t eaddr, int tlbsel, unsigned int pid, int as) in kvmppc_e500_tlb_index() argument
88 set_base = gtlb0_set_base(vcpu_e500, eaddr); in kvmppc_e500_tlb_index()
91 if (eaddr < vcpu_e500->tlb1_min_eaddr || in kvmppc_e500_tlb_index()
92 eaddr > vcpu_e500->tlb1_max_eaddr) in kvmppc_e500_tlb_index()
104 if (eaddr < get_tlb_eaddr(tlbe)) in kvmppc_e500_tlb_index()
107 if (eaddr > get_tlb_end(tlbe)) in kvmppc_e500_tlb_index()
127 gva_t eaddr, int as) in kvmppc_e500_deliver_tlb_miss() argument
143 vcpu->arch.shared->mas2 = (eaddr & MAS2_EPN) in kvmppc_e500_deliver_tlb_miss()
155 gva_t eaddr; in kvmppc_recalc_tlb1map_range() local
169 eaddr = get_tlb_eaddr(tlbe); in kvmppc_recalc_tlb1map_range()
[all …]
Dbook3s_mmu_hpte.c26 static inline u64 kvmppc_mmu_hash_pte(u64 eaddr) in kvmppc_mmu_hash_pte() argument
28 return hash_64(eaddr >> PTE_SIZE, HPTEG_HASH_BITS_PTE); in kvmppc_mmu_hash_pte()
31 static inline u64 kvmppc_mmu_hash_pte_long(u64 eaddr) in kvmppc_mmu_hash_pte_long() argument
33 return hash_64((eaddr & 0x0ffff000) >> PTE_SIZE, in kvmppc_mmu_hash_pte_long()
66 index = kvmppc_mmu_hash_pte(pte->pte.eaddr); in kvmppc_mmu_hpte_cache_map()
70 index = kvmppc_mmu_hash_pte_long(pte->pte.eaddr); in kvmppc_mmu_hpte_cache_map()
163 if ((pte->pte.eaddr & ~0xfffUL) == guest_ea) in kvmppc_mmu_pte_flush_page()
183 if ((pte->pte.eaddr & 0x0ffff000UL) == guest_ea) in kvmppc_mmu_pte_flush_long()
Dbook3s_64_mmu_host.c106 vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_map_page()
109 ret = kvmppc_mmu_map_segment(vcpu, orig_pte->eaddr); in kvmppc_mmu_map_page()
115 vsid, orig_pte->eaddr); in kvmppc_mmu_map_page()
121 vpn = hpt_vpn(orig_pte->eaddr, map->host_vsid, MMU_SEGSIZE_256M); in kvmppc_mmu_map_page()
217 vcpu->arch.mmu.esid_to_vsid(vcpu, pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_unmap_page()
310 int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr) in kvmppc_mmu_map_segment() argument
313 u64 esid = eaddr >> SID_SHIFT; in kvmppc_mmu_map_segment()
314 u64 slb_esid = (eaddr & ESID_MASK) | SLB_ESID_V; in kvmppc_mmu_map_segment()
321 slb_index = kvmppc_mmu_next_segment(vcpu, eaddr & ESID_MASK); in kvmppc_mmu_map_segment()
Dbook3s_64_mmu_radix.c33 gva_t eaddr, void *to, void *from, in __kvmhv_copy_tofrom_guest_radix() argument
42 return plpar_hcall_norets(H_COPY_TOFROM_GUEST, lpid, pid, eaddr, in __kvmhv_copy_tofrom_guest_radix()
50 from = (void *) (eaddr | (quadrant << 62)); in __kvmhv_copy_tofrom_guest_radix()
52 to = (void *) (eaddr | (quadrant << 62)); in __kvmhv_copy_tofrom_guest_radix()
87 static long kvmhv_copy_tofrom_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmhv_copy_tofrom_guest_radix() argument
94 if (eaddr & (0x3FFUL << 52)) in kvmhv_copy_tofrom_guest_radix()
102 if (((eaddr >> 62) & 0x3) == 0x3) in kvmhv_copy_tofrom_guest_radix()
105 eaddr &= ~(0xFFFUL << 52); in kvmhv_copy_tofrom_guest_radix()
107 return __kvmhv_copy_tofrom_guest_radix(lpid, pid, eaddr, to, from, n); in kvmhv_copy_tofrom_guest_radix()
110 long kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr, void *to, in kvmhv_copy_from_guest_radix() argument
[all …]
De500_mmu_host.c105 static u32 get_host_mas0(unsigned long eaddr) in get_host_mas0() argument
115 asm volatile("tlbsx 0, %0" : : "b" (eaddr & ~CONFIG_PAGE_OFFSET)); in get_host_mas0()
586 void kvmppc_mmu_map(struct kvm_vcpu *vcpu, u64 eaddr, gpa_t gpaddr, in kvmppc_mmu_map() argument
606 &priv->ref, eaddr, &stlbe); in kvmppc_mmu_map()
613 kvmppc_e500_tlb1_map(vcpu_e500, eaddr, gfn, gtlbe, &stlbe, in kvmppc_mmu_map()
631 hva_t eaddr; in kvmppc_load_last_inst() local
708 eaddr = (unsigned long)kmap_atomic(page); in kvmppc_load_last_inst()
709 *instr = *(u32 *)(eaddr | (unsigned long)(addr & ~PAGE_MASK)); in kvmppc_load_last_inst()
710 kunmap_atomic((u32 *)eaddr); in kvmppc_load_last_inst()
Dbooke.c1236 unsigned long eaddr = vcpu->arch.fault_dear; in kvmppc_handle_exit() local
1243 (eaddr & PAGE_MASK) == vcpu->arch.magic_page_ea) { in kvmppc_handle_exit()
1253 gtlb_index = kvmppc_mmu_dtlb_index(vcpu, eaddr); in kvmppc_handle_exit()
1267 gpaddr = kvmppc_mmu_xlate(vcpu, gtlb_index, eaddr); in kvmppc_handle_exit()
1277 kvmppc_mmu_map(vcpu, eaddr, gpaddr, gtlb_index); in kvmppc_handle_exit()
1284 vcpu->arch.vaddr_accessed = eaddr; in kvmppc_handle_exit()
1294 unsigned long eaddr = vcpu->arch.regs.nip; in kvmppc_handle_exit() local
1302 gtlb_index = kvmppc_mmu_itlb_index(vcpu, eaddr); in kvmppc_handle_exit()
1315 gpaddr = kvmppc_mmu_xlate(vcpu, gtlb_index, eaddr); in kvmppc_handle_exit()
1325 kvmppc_mmu_map(vcpu, eaddr, gpaddr, gtlb_index); in kvmppc_handle_exit()
[all …]
De500mc.c59 gva_t eaddr; in kvmppc_e500_tlbil_one() local
68 eaddr = get_tlb_eaddr(gtlbe); in kvmppc_e500_tlbil_one()
75 asm volatile("tlbsx 0, %[eaddr]\n" : : [eaddr] "r" (eaddr)); in kvmppc_e500_tlbil_one()
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
Dkvm_book3s.h155 extern int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr);
156 extern void kvmppc_mmu_flush_segment(struct kvm_vcpu *vcpu, ulong eaddr, ulong seg_size);
160 extern long kvmppc_hv_find_lock_hpte(struct kvm *kvm, gva_t eaddr,
179 gva_t eaddr, void *to, void *from,
181 extern long kvmhv_copy_from_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
183 extern long kvmhv_copy_to_guest_radix(struct kvm_vcpu *vcpu, gva_t eaddr,
185 extern int kvmppc_mmu_walk_radix_tree(struct kvm_vcpu *vcpu, gva_t eaddr,
188 extern int kvmppc_mmu_radix_translate_table(struct kvm_vcpu *vcpu, gva_t eaddr,
191 extern int kvmppc_mmu_radix_xlate(struct kvm_vcpu *vcpu, gva_t eaddr,
226 extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr, bool data);
Dkvm_ppc.h89 extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
91 extern int kvmppc_st(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
109 extern int kvmppc_mmu_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
110 extern int kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
112 gva_t eaddr);
115 extern int kvmppc_xlate(struct kvm_vcpu *vcpu, ulong eaddr,
311 int (*load_from_eaddr)(struct kvm_vcpu *vcpu, ulong *eaddr, void *ptr,
313 int (*store_to_eaddr)(struct kvm_vcpu *vcpu, ulong *eaddr, void *ptr,
/kernel/linux/linux-5.10/arch/arm/mm/
Dalignment.c495 unsigned long eaddr, newaddr; in do_alignment_ldmstm() local
509 newaddr = eaddr = regs->uregs[rn]; in do_alignment_ldmstm()
515 eaddr = newaddr; in do_alignment_ldmstm()
518 eaddr += 4; in do_alignment_ldmstm()
532 if (addr != eaddr) { in do_alignment_ldmstm()
535 instruction_pointer(regs), instr, addr, eaddr); in do_alignment_ldmstm()
547 get32t_unaligned_check(val, eaddr); in do_alignment_ldmstm()
550 put32t_unaligned_check(regs->uregs[rd], eaddr); in do_alignment_ldmstm()
551 eaddr += 4; in do_alignment_ldmstm()
560 get32_unaligned_check(val, eaddr); in do_alignment_ldmstm()
[all …]
/kernel/linux/linux-5.10/fs/freevxfs/
Dvxfs_olt.c82 char *oaddr, *eaddr; in vxfs_read_olt() local
105 eaddr = bp->b_data + (infp->vsi_oltsize * sbp->s_blocksize); in vxfs_read_olt()
107 while (oaddr < eaddr) { in vxfs_read_olt()
/kernel/linux/linux-5.10/arch/powerpc/platforms/pseries/
Dras.c564 unsigned long eaddr = 0, paddr = 0; in mce_handle_err_virtmode() local
636 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
643 pfn = addr_to_pfn(regs, eaddr); in mce_handle_err_virtmode()
664 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
681 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
698 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
713 &mce_err, regs->nip, eaddr, paddr); in mce_handle_err_virtmode()
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
Damdgpu_vm.c2223 uint64_t eaddr; in amdgpu_vm_bo_map() local
2231 eaddr = saddr + size - 1; in amdgpu_vm_bo_map()
2232 if (saddr >= eaddr || in amdgpu_vm_bo_map()
2234 (eaddr >= adev->vm_manager.max_pfn << AMDGPU_GPU_PAGE_SHIFT)) in amdgpu_vm_bo_map()
2238 eaddr /= AMDGPU_GPU_PAGE_SIZE; in amdgpu_vm_bo_map()
2240 tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr); in amdgpu_vm_bo_map()
2244 "0x%010Lx-0x%010Lx\n", bo, saddr, eaddr, in amdgpu_vm_bo_map()
2254 mapping->last = eaddr; in amdgpu_vm_bo_map()
2288 uint64_t eaddr; in amdgpu_vm_bo_replace_map() local
2297 eaddr = saddr + size - 1; in amdgpu_vm_bo_replace_map()
[all …]
/kernel/linux/linux-5.10/drivers/slimbus/
Dcore.c178 struct slim_eaddr *eaddr, in slim_alloc_device() argument
188 sbdev->e_addr = *eaddr; in slim_alloc_device()
350 struct slim_eaddr *eaddr) in find_slim_device() argument
355 dev = device_find_child(ctrl->dev, eaddr, slim_match_dev); in find_slim_device()

12