@@ -1486,6 +1486,7 @@ where
                }
                _ => panic!(),
            },
+            Object::FieldUnit(field) => self.do_field_write(field, object)?,
             Object::Debug => {
                 self.handler.handle_debug(&*object);
             }
@@ -1526,56 +1527,129 @@ where
             Output::Integer(value) => value,
         };
 
-        match field.kind {
-            FieldUnitKind::Normal { ref region } => {
-                let Object::OpRegion(ref region) = **region else { panic!() };
-
-                /*
-                 * TODO: it might be worth having a fast path here for reads that don't do weird
-                 * unaligned accesses, which I'm guessing might be relatively common on real
-                 * hardware? Eg. single native read + mask
-                 */
-
-                /*
-                 * Break the field read into native reads that respect the region's access width.
-                 * Copy each potentially-unaligned part into the destination's bit range.
-                 */
-                let native_accesses_needed = (field.bit_length + (field.bit_index % access_width_bits))
-                    .next_multiple_of(access_width_bits)
-                    / access_width_bits;
-                let mut read_so_far = 0;
-                for i in 0..native_accesses_needed {
-                    let aligned_offset =
-                        object::align_down(field.bit_index + i * access_width_bits, access_width_bits);
-                    let raw = self.do_native_region_read(region, aligned_offset / 8, access_width_bits / 8)?;
-                    let src_index = if i == 0 { field.bit_index % access_width_bits } else { 0 };
-                    let remaining_length = field.bit_length - read_so_far;
-                    let length = if i == 0 {
-                        usize::min(remaining_length, access_width_bits - (field.bit_index % access_width_bits))
-                    } else {
-                        usize::min(remaining_length, access_width_bits)
-                    };
+        let read_region = match field.kind {
+            FieldUnitKind::Normal { ref region } => region,
+            FieldUnitKind::Bank { ref region, ref bank, bank_value } => {
+                // TODO: put the bank_value in the bank
+                todo!();
+                region
+            }
+            FieldUnitKind::Index { ref index, ref data } => {
+                // TODO: configure the correct index
+                todo!();
+                data
+            }
+        };
+        let Object::OpRegion(ref read_region) = **read_region else { panic!() };
 
-                    trace!(
-                        "Extracting bits {}..{} from native read to bits {}..{}",
-                        src_index,
-                        src_index + length,
-                        read_so_far,
-                        read_so_far + length,
-                    );
-                    object::copy_bits(&raw.to_le_bytes(), src_index, output_bytes, read_so_far, length);
+        /*
+         * TODO: it might be worth having a fast path here for reads that don't do weird
+         * unaligned accesses, which I'm guessing might be relatively common on real
+         * hardware? Eg. single native read + mask
+         */
 
-                    read_so_far += length;
-                }
+        /*
+         * Break the field read into native reads that respect the region's access width.
+         * Copy each potentially-unaligned part into the destination's bit range.
+         */
+        let native_accesses_needed = (field.bit_length + (field.bit_index % access_width_bits))
+            .next_multiple_of(access_width_bits)
+            / access_width_bits;
+        let mut read_so_far = 0;
+        for i in 0..native_accesses_needed {
+            let aligned_offset = object::align_down(field.bit_index + i * access_width_bits, access_width_bits);
+            let raw = self.do_native_region_read(read_region, aligned_offset / 8, access_width_bits / 8)?;
+            let src_index = if i == 0 { field.bit_index % access_width_bits } else { 0 };
+            let remaining_length = field.bit_length - read_so_far;
+            let length = if i == 0 {
+                usize::min(remaining_length, access_width_bits - (field.bit_index % access_width_bits))
+            } else {
+                usize::min(remaining_length, access_width_bits)
+            };
 
-                match output {
-                    Output::Buffer(bytes) => Ok(Arc::new(Object::Buffer(bytes))),
-                    Output::Integer(value) => Ok(Arc::new(Object::Integer(u64::from_le_bytes(value)))),
-                }
+            object::copy_bits(&raw.to_le_bytes(), src_index, output_bytes, read_so_far, length);
+            read_so_far += length;
+        }
+
+        match output {
+            Output::Buffer(bytes) => Ok(Arc::new(Object::Buffer(bytes))),
+            Output::Integer(value) => Ok(Arc::new(Object::Integer(u64::from_le_bytes(value)))),
+        }
+    }
+
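The splitting arithmetic above is easiest to follow with concrete numbers. The following standalone sketch is not part of the patch: the field parameters are invented and a local `align_down` stands in for `object::align_down`. It walks a 10-bit field starting at bit 19 of a region with 8-bit access width through the same logic.

fn align_down(value: usize, align: usize) -> usize {
    // Round `value` down to a multiple of `align` (a power of two), like `object::align_down`.
    value & !(align - 1)
}

fn main() {
    // Invented field parameters: 10 bits starting at bit 19, with byte-wide native accesses.
    let (bit_index, bit_length, access_width_bits) = (19usize, 10usize, 8usize);

    // 10 bits starting 3 bits into a byte straddle two native accesses.
    let native_accesses_needed =
        (bit_length + (bit_index % access_width_bits)).next_multiple_of(access_width_bits) / access_width_bits;
    assert_eq!(native_accesses_needed, 2);

    let mut done_so_far = 0;
    for i in 0..native_accesses_needed {
        // Byte offset of this native access within the region.
        let aligned_offset = align_down(bit_index + i * access_width_bits, access_width_bits);
        // Bit within the native access where the field's bits start.
        let src_index = if i == 0 { bit_index % access_width_bits } else { 0 };
        // How many field bits this native access contributes.
        let remaining = bit_length - done_so_far;
        let length = if i == 0 {
            usize::min(remaining, access_width_bits - (bit_index % access_width_bits))
        } else {
            usize::min(remaining, access_width_bits)
        };
        println!("access {i}: region byte {}, bits {src_index}..{}", aligned_offset / 8, src_index + length);
        done_so_far += length;
    }
    // Prints: access 0 covers region byte 2, bits 3..8; access 1 covers byte 3, bits 0..5.
}

The write path below performs the same split, with `copy_bits` run in the opposite direction.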
+    fn do_field_write(&self, field: &FieldUnit, value: Arc<Object>) -> Result<(), AmlError> {
+        trace!("AML field write. Field = {:?}. Value = {:?}", field, value);
+
+        let value_bytes = match &*value {
+            Object::Integer(value) => &value.to_le_bytes() as &[u8],
+            Object::Buffer(bytes) => &bytes,
+            _ => Err(AmlError::ObjectNotOfExpectedType { expected: ObjectType::Integer, got: value.typ() })?,
+        };
+        let access_width_bits = field.flags.access_type_bytes()? * 8;
+
+        let write_region = match field.kind {
+            FieldUnitKind::Normal { ref region } => region,
+            FieldUnitKind::Bank { ref region, ref bank, bank_value } => {
+                // TODO: put the bank_value in the bank
+                todo!();
+                region
+            }
+            FieldUnitKind::Index { ref index, ref data } => {
+                // TODO: configure the correct index
+                todo!();
+                data
             }
-            FieldUnitKind::Bank { ref region, ref bank, bank_value } => todo!(),
-            FieldUnitKind::Index { ref index, ref data } => todo!(),
+        };
+        let Object::OpRegion(ref write_region) = **write_region else { panic!() };
+
+        // TODO: if the region wants locking, do that
+
+        // TODO: maybe also a fast path for writes
+
+        let native_accesses_needed = (field.bit_length + (field.bit_index % access_width_bits))
+            .next_multiple_of(access_width_bits)
+            / access_width_bits;
+        let mut written_so_far = 0;
+
+        for i in 0..native_accesses_needed {
+            let aligned_offset = object::align_down(field.bit_index + i * access_width_bits, access_width_bits);
+            let dst_index = if i == 0 { field.bit_index % access_width_bits } else { 0 };
+
+            /*
+             * If we're not going to write a whole native access, respect the field's
+             * update rule. If we're meant to preserve the surrounding bits, we need to do
+             * a read first.
+             */
+            let mut bytes = if dst_index > 0 || (field.bit_length - written_so_far) < access_width_bits {
+                match field.flags.update_rule() {
+                    FieldUpdateRule::Preserve => self
+                        .do_native_region_read(write_region, aligned_offset / 8, access_width_bits / 8)?
+                        .to_le_bytes(),
+                    FieldUpdateRule::WriteAsOnes => [0xff; 8],
+                    FieldUpdateRule::WriteAsZeros => [0; 8],
+                }
+            } else {
+                [0; 8]
+            };
+
+            let remaining_length = field.bit_length - written_so_far;
+            let length = if i == 0 {
+                usize::min(remaining_length, access_width_bits - (field.bit_index % access_width_bits))
+            } else {
+                usize::min(remaining_length, access_width_bits)
+            };
+
+            object::copy_bits(value_bytes, written_so_far, &mut bytes, dst_index, length);
+            self.do_native_region_write(
+                write_region,
+                aligned_offset / 8,
+                access_width_bits / 8,
+                u64::from_le_bytes(bytes),
+            )?;
+            written_so_far += length;
         }
+
+        Ok(())
     }
 
     /// Performs an actual read from an operation region. `offset` and `length` must respect the
@@ -1605,37 +1679,7 @@ where
                 }
             }),
             RegionSpace::PciConfig => {
-                /*
-                 * TODO: it's not ideal to do these reads for every native access. See if we can
-                 * cache them somewhere?
-                 */
-                let seg = match self.invoke_method_if_present(
-                    AmlName::from_str("_SEG").unwrap().resolve(&region.parent_device_path)?,
-                    vec![],
-                )? {
-                    Some(value) => value.as_integer()?,
-                    None => 0,
-                };
-                let bus = match self.invoke_method_if_present(
-                    AmlName::from_str("_BBR").unwrap().resolve(&region.parent_device_path)?,
-                    vec![],
-                )? {
-                    Some(value) => value.as_integer()?,
-                    None => 0,
-                };
-                let (device, function) = {
-                    let adr = self.invoke_method_if_present(
-                        AmlName::from_str("_ADR").unwrap().resolve(&region.parent_device_path)?,
-                        vec![],
-                    )?;
-                    let adr = match adr {
-                        Some(adr) => adr.as_integer()?,
-                        None => 0,
-                    };
-                    (adr.get_bits(16..32), adr.get_bits(0..16))
-                };
-
-                let address = PciAddress::new(seg as u16, bus as u8, device as u8, function as u8);
+                let address = self.pci_address_for_device(&region.parent_device_path)?;
                 match length {
                     1 => Ok(self.handler.read_pci_u8(address, offset as u16) as u64),
                     2 => Ok(self.handler.read_pci_u16(address, offset as u16) as u64),
@@ -1656,12 +1700,99 @@ where
                 if let Some(handler) = self.region_handlers.lock().get(&region.space) {
                     todo!("Utilise handler");
                 } else {
-                    // TODO: panic or normal error here??
-                    panic!("Unhandled region space that needs handler!");
+                    Err(AmlError::NoHandlerForRegionAccess(region.space))
+                }
+            }
+        }
+    }
+
+    /// Performs an actual write to an operation region. `offset` and `length` must respect the
+    /// access requirements of the field being written, and are supplied in **bytes**. This may call
+    /// AML methods if required, and may invoke user-supplied handlers.
+    fn do_native_region_write(
+        &self,
+        region: &OpRegion,
+        offset: usize,
+        length: usize,
+        value: u64,
+    ) -> Result<(), AmlError> {
+        trace!(
+            "Native field write. Region = {:?}, offset = {:#x}, length={:#x}, value={:#x}",
+            region, offset, length, value
+        );
+
+        match region.space {
+            RegionSpace::SystemMemory => Ok({
+                let address = region.base as usize + offset;
+                match length {
+                    1 => self.handler.write_u8(address, value as u8),
+                    2 => self.handler.write_u16(address, value as u16),
+                    4 => self.handler.write_u32(address, value as u32),
+                    8 => self.handler.write_u64(address, value),
+                    _ => panic!(),
+                }
+            }),
+            RegionSpace::SystemIO => Ok({
+                let address = region.base as u16 + offset as u16;
+                match length {
+                    1 => self.handler.write_io_u8(address, value as u8),
+                    2 => self.handler.write_io_u16(address, value as u16),
+                    4 => self.handler.write_io_u32(address, value as u32),
+                    _ => panic!(),
+                }
+            }),
+            RegionSpace::PciConfig => {
+                let address = self.pci_address_for_device(&region.parent_device_path)?;
+                match length {
+                    1 => self.handler.write_pci_u8(address, offset as u16, value as u8),
+                    2 => self.handler.write_pci_u16(address, offset as u16, value as u16),
+                    4 => self.handler.write_pci_u32(address, offset as u16, value as u32),
+                    _ => panic!(),
+                }
+                Ok(())
+            }
+
+            RegionSpace::EmbeddedControl
+            | RegionSpace::SmBus
+            | RegionSpace::SystemCmos
+            | RegionSpace::PciBarTarget
+            | RegionSpace::Ipmi
+            | RegionSpace::GeneralPurposeIo
+            | RegionSpace::GenericSerialBus
+            | RegionSpace::Pcc
+            | RegionSpace::Oem(_) => {
+                if let Some(handler) = self.region_handlers.lock().get(&region.space) {
+                    todo!("Utilise handler");
+                } else {
+                    Err(AmlError::NoHandlerForRegionAccess(region.space))
                 }
             }
         }
     }
+
+    fn pci_address_for_device(&self, path: &AmlName) -> Result<PciAddress, AmlError> {
+        /*
+         * TODO: it's not ideal to do these reads for every native access. See if we can
+         * cache them somewhere?
+         */
+        let seg = match self.invoke_method_if_present(AmlName::from_str("_SEG").unwrap().resolve(path)?, vec![])? {
+            Some(value) => value.as_integer()?,
+            None => 0,
+        };
+        let bus = match self.invoke_method_if_present(AmlName::from_str("_BBR").unwrap().resolve(path)?, vec![])? {
+            Some(value) => value.as_integer()?,
+            None => 0,
+        };
+        let (device, function) = {
+            let adr = self.invoke_method_if_present(AmlName::from_str("_ADR").unwrap().resolve(path)?, vec![])?;
+            let adr = match adr {
+                Some(adr) => adr.as_integer()?,
+                None => 0,
+            };
+            (adr.get_bits(16..32), adr.get_bits(0..16))
+        };
+        Ok(PciAddress::new(seg as u16, bus as u8, device as u8, function as u8))
+    }
 }
 
 /// A `MethodContext` represents a piece of running AML code - either a real method, or the
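For reference, a PCI device's `_ADR` packs the device number into bits 16..32 and the function number into bits 0..16, which is what the `get_bits` split in `pci_address_for_device` extracts. A standalone sketch with an invented `_ADR` value, using plain shifts in place of `get_bits`:

fn main() {
    // Invented example: _ADR = 0x001f_0003 describes device 0x1f, function 3.
    let adr: u64 = 0x001f_0003;

    let device = (adr >> 16) & 0xffff; // adr.get_bits(16..32)
    let function = adr & 0xffff; // adr.get_bits(0..16)
    assert_eq!((device, function), (0x1f, 0x3));

    // _SEG and the bus-number lookup default to 0 when the methods are absent,
    // as in `pci_address_for_device` above.
    let (segment, bus) = (0u16, 0u8);
    println!("{:04x}:{:02x}:{:02x}.{}", segment, bus, device, function);
    // Prints "0000:00:1f.3" in the usual segment:bus:device.function notation.
}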
@@ -2241,6 +2372,7 @@ pub enum AmlError {
     InvalidResourceDescriptor,
     UnexpectedResourceType,
 
+    NoHandlerForRegionAccess(RegionSpace),
     MutexAquireTimeout,
 
     PrtInvalidAddress,